Version 3.23.5

Fixed missing type feedback check for Generic*String addition. (Chromium issue 318671)

Fixed duplicate check in DependentCode::Insert. (Chromium issue 318454)

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@17777 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index 5a80034..c6d59ef 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,14 @@
+2013-11-15: Version 3.23.5
+
+        Fixed missing type feedback check for Generic*String addition.
+        (Chromium issue 318671)
+
+        Fixed duplicate check in DependentCode::Insert.
+        (Chromium issue 318454)
+
+        Performance and stability improvements on all platforms.
+
+
 2013-11-14: Version 3.23.4
 
         Fixed overflow in TypedArray initialization function.
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 75e16e3..fe15157 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -69,15 +69,28 @@
   return input_api.environ.get('PRESUBMIT_TREE_CHECK') == 'skip'
 
 
+def _CheckChangeLogFlag(input_api, output_api):
+  """Checks usage of LOG= flag in the commit message."""
+  results = []
+  if input_api.change.BUG and 'LOG' not in input_api.change.tags:
+    results.append(output_api.PresubmitError(
+        'An issue reference (BUG=) requires a change log flag (LOG=). '
+        'Use LOG=Y to include this commit message in the change log. '
+        'Use LOG=N or leave it blank otherwise.'))
+  return results
+
+
 def CheckChangeOnUpload(input_api, output_api):
   results = []
   results.extend(_CommonChecks(input_api, output_api))
+  results.extend(_CheckChangeLogFlag(input_api, output_api))
   return results
 
 
 def CheckChangeOnCommit(input_api, output_api):
   results = []
   results.extend(_CommonChecks(input_api, output_api))
+  results.extend(_CheckChangeLogFlag(input_api, output_api))
   results.extend(input_api.canned_checks.CheckChangeHasDescription(
       input_api, output_api))
   if not _SkipTreeCheck(input_api, output_api):
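
For illustration (hypothetical issue number), a change description that passes the new check carries both footers:

    Fix some feature.

    BUG=v8:1234
    LOG=N

A description with a BUG= line but no LOG= line is now rejected by _CheckChangeLogFlag both on upload and on commit.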
diff --git a/include/v8.h b/include/v8.h
index d6678c5..f0b627e 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -5246,19 +5246,11 @@
 
   ~Locker();
 
-  /**
-   * Start preemption.
-   *
-   * When preemption is started, a timer is fired every n milliseconds
-   * that will switch between multiple threads that are in contention
-   * for the V8 lock.
-   */
-  static void StartPreemption(Isolate* isolate, int every_n_ms);
+  V8_DEPRECATED("This will be removed.",
+                static void StartPreemption(Isolate* isolate, int every_n_ms));
 
-  /**
-   * Stop preemption.
-   */
-  static void StopPreemption(Isolate* isolate);
+  V8_DEPRECATED("This will be removed.",
+                static void StopPreemption(Isolate* isolate));
 
   /**
    * Returns whether or not the locker for a given isolate, is locked by the
diff --git a/src/allocation-tracker.cc b/src/allocation-tracker.cc
new file mode 100644
index 0000000..586ce3c
--- /dev/null
+++ b/src/allocation-tracker.cc
@@ -0,0 +1,279 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "allocation-tracker.h"
+
+#include "heap-snapshot-generator.h"
+#include "frames-inl.h"
+
+namespace v8 {
+namespace internal {
+
+AllocationTraceNode::AllocationTraceNode(
+    AllocationTraceTree* tree, SnapshotObjectId shared_function_info_id)
+    : tree_(tree),
+      function_id_(shared_function_info_id),
+      total_size_(0),
+      allocation_count_(0),
+      id_(tree->next_node_id()) {
+}
+
+
+AllocationTraceNode::~AllocationTraceNode() {
+}
+
+
+AllocationTraceNode* AllocationTraceNode::FindChild(SnapshotObjectId id) {
+  for (int i = 0; i < children_.length(); i++) {
+    AllocationTraceNode* node = children_[i];
+    if (node->function_id() == id) return node;
+  }
+  return NULL;
+}
+
+
+AllocationTraceNode* AllocationTraceNode::FindOrAddChild(SnapshotObjectId id) {
+  AllocationTraceNode* child = FindChild(id);
+  if (child == NULL) {
+    child = new AllocationTraceNode(tree_, id);
+    children_.Add(child);
+  }
+  return child;
+}
+
+
+void AllocationTraceNode::AddAllocation(unsigned size) {
+  total_size_ += size;
+  ++allocation_count_;
+}
+
+
+void AllocationTraceNode::Print(int indent, AllocationTracker* tracker) {
+  OS::Print("%10u %10u %*c", total_size_, allocation_count_, indent, ' ');
+  if (tracker != NULL) {
+    const char* name = "<unknown function>";
+    if (function_id_ != 0) {
+      AllocationTracker::FunctionInfo* info =
+          tracker->GetFunctionInfo(function_id_);
+      if (info != NULL) {
+        name = info->name;
+      }
+    }
+    OS::Print("%s #%u", name, id_);
+  } else {
+    OS::Print("%u #%u", function_id_, id_);
+  }
+  OS::Print("\n");
+  indent += 2;
+  for (int i = 0; i < children_.length(); i++) {
+    children_[i]->Print(indent, tracker);
+  }
+}
+
+
+AllocationTraceTree::AllocationTraceTree()
+    : next_node_id_(1),
+      root_(this, 0) {
+}
+
+
+AllocationTraceTree::~AllocationTraceTree() {
+}
+
+
+AllocationTraceNode* AllocationTraceTree::AddPathFromEnd(
+    const Vector<SnapshotObjectId>& path) {
+  AllocationTraceNode* node = root();
+  for (SnapshotObjectId* entry = path.start() + path.length() - 1;
+       entry != path.start() - 1;
+       --entry) {
+    node = node->FindOrAddChild(*entry);
+  }
+  return node;
+}
+
+
+void AllocationTraceTree::Print(AllocationTracker* tracker) {
+  OS::Print("[AllocationTraceTree:]\n");
+  OS::Print("Total size | Allocation count | Function id | id\n");
+  root()->Print(0, tracker);
+}
+
+void AllocationTracker::DeleteUnresolvedLocation(
+    UnresolvedLocation** location) {
+  delete *location;
+}
+
+
+AllocationTracker::FunctionInfo::FunctionInfo()
+    : name(""),
+      script_name(""),
+      script_id(0),
+      line(-1),
+      column(-1) {
+}
+
+
+static bool AddressesMatch(void* key1, void* key2) {
+  return key1 == key2;
+}
+
+
+AllocationTracker::AllocationTracker(
+    HeapObjectsMap* ids, StringsStorage* names)
+    : ids_(ids),
+      names_(names),
+      id_to_function_info_(AddressesMatch) {
+}
+
+
+AllocationTracker::~AllocationTracker() {
+  unresolved_locations_.Iterate(DeleteUnresolvedLocation);
+}
+
+
+void AllocationTracker::PrepareForSerialization() {
+  List<UnresolvedLocation*> copy(unresolved_locations_.length());
+  copy.AddAll(unresolved_locations_);
+  unresolved_locations_.Clear();
+  for (int i = 0; i < copy.length(); i++) {
+    copy[i]->Resolve();
+    delete copy[i];
+  }
+}
+
+
+void AllocationTracker::NewObjectEvent(Address addr, int size) {
+  DisallowHeapAllocation no_allocation;
+  Heap* heap = ids_->heap();
+
+  // Mark the new block as FreeSpace to make sure the heap is iterable
+  // while we are capturing stack trace.
+  FreeListNode::FromAddress(addr)->set_size(heap, size);
+  ASSERT_EQ(HeapObject::FromAddress(addr)->Size(), size);
+  ASSERT(FreeListNode::IsFreeListNode(HeapObject::FromAddress(addr)));
+
+  Isolate* isolate = heap->isolate();
+  int length = 0;
+  StackTraceFrameIterator it(isolate);
+  while (!it.done() && length < kMaxAllocationTraceLength) {
+    JavaScriptFrame* frame = it.frame();
+    SharedFunctionInfo* shared = frame->function()->shared();
+    SnapshotObjectId id = ids_->FindEntry(shared->address());
+    allocation_trace_buffer_[length++] = id;
+    AddFunctionInfo(shared, id);
+    it.Advance();
+  }
+  AllocationTraceNode* top_node = trace_tree_.AddPathFromEnd(
+      Vector<SnapshotObjectId>(allocation_trace_buffer_, length));
+  top_node->AddAllocation(size);
+}
+
+
+static uint32_t SnapshotObjectIdHash(SnapshotObjectId id) {
+  return ComputeIntegerHash(static_cast<uint32_t>(id),
+                            v8::internal::kZeroHashSeed);
+}
+
+
+AllocationTracker::FunctionInfo* AllocationTracker::GetFunctionInfo(
+    SnapshotObjectId id) {
+  HashMap::Entry* entry = id_to_function_info_.Lookup(
+      reinterpret_cast<void*>(id), SnapshotObjectIdHash(id), false);
+  if (entry == NULL) {
+    return NULL;
+  }
+  return reinterpret_cast<FunctionInfo*>(entry->value);
+}
+
+
+void AllocationTracker::AddFunctionInfo(SharedFunctionInfo* shared,
+                                        SnapshotObjectId id) {
+  HashMap::Entry* entry = id_to_function_info_.Lookup(
+      reinterpret_cast<void*>(id), SnapshotObjectIdHash(id), true);
+  if (entry->value == NULL) {
+    FunctionInfo* info = new FunctionInfo();
+    info->name = names_->GetFunctionName(shared->DebugName());
+    if (shared->script()->IsScript()) {
+      Script* script = Script::cast(shared->script());
+      if (script->name()->IsName()) {
+        Name* name = Name::cast(script->name());
+        info->script_name = names_->GetName(name);
+      }
+      info->script_id = script->id()->value();
+      // Converting start offset into line and column may cause heap
+      // allocations so we postpone them until snapshot serialization.
+      unresolved_locations_.Add(new UnresolvedLocation(
+          script,
+          shared->start_position(),
+          info));
+    }
+    entry->value = info;
+  }
+}
+
+
+AllocationTracker::UnresolvedLocation::UnresolvedLocation(
+    Script* script, int start, FunctionInfo* info)
+    : start_position_(start),
+      info_(info) {
+  script_ = Handle<Script>::cast(
+      script->GetIsolate()->global_handles()->Create(script));
+  GlobalHandles::MakeWeak(
+      reinterpret_cast<Object**>(script_.location()),
+      this, &HandleWeakScript);
+}
+
+
+AllocationTracker::UnresolvedLocation::~UnresolvedLocation() {
+  if (!script_.is_null()) {
+    script_->GetIsolate()->global_handles()->Destroy(
+        reinterpret_cast<Object**>(script_.location()));
+  }
+}
+
+
+void AllocationTracker::UnresolvedLocation::Resolve() {
+  if (script_.is_null()) return;
+  info_->line = GetScriptLineNumber(script_, start_position_);
+  info_->column = GetScriptColumnNumber(script_, start_position_);
+}
+
+
+void AllocationTracker::UnresolvedLocation::HandleWeakScript(
+    v8::Isolate* isolate,
+    v8::Persistent<v8::Value>* obj,
+    void* data) {
+  UnresolvedLocation* location = reinterpret_cast<UnresolvedLocation*>(data);
+  location->script_ = Handle<Script>::null();
+  obj->Dispose();
+}
+
+
+} }  // namespace v8::internal
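
At every NewObjectEvent the tracker above walks the JavaScript stack innermost frame first into allocation_trace_buffer_, then AddPathFromEnd descends the trace tree starting from the outermost frame, creating child nodes keyed by SharedFunctionInfo snapshot id as needed, and the allocation's size and count are accumulated on the node reached for the innermost frame. A minimal standalone sketch of that accumulation logic (plain C++, illustrative names, not V8 code):

#include <cstdint>
#include <cstdio>
#include <vector>

// Standalone sketch of a trace tree keyed by function id.
struct TraceNode {
  explicit TraceNode(std::uint32_t id) : function_id(id) {}
  ~TraceNode() { for (TraceNode* child : children) delete child; }

  TraceNode* FindOrAddChild(std::uint32_t id) {
    for (TraceNode* child : children) {
      if (child->function_id == id) return child;
    }
    children.push_back(new TraceNode(id));
    return children.back();
  }

  std::uint32_t function_id;
  unsigned total_size = 0;
  unsigned allocation_count = 0;
  std::vector<TraceNode*> children;
};

struct TraceTree {
  TraceNode root{0};

  // |path| lists function ids innermost frame first, mirroring
  // allocation_trace_buffer_; the tree is descended from the outermost frame.
  void AddAllocation(const std::vector<std::uint32_t>& path, unsigned size) {
    TraceNode* node = &root;
    for (auto it = path.rbegin(); it != path.rend(); ++it) {
      node = node->FindOrAddChild(*it);
    }
    node->total_size += size;
    ++node->allocation_count;
  }
};

int main() {
  TraceTree tree;
  tree.AddAllocation({2, 1}, 32);  // inner function id 2, outer function id 1
  tree.AddAllocation({2, 1}, 16);
  TraceNode* inner = tree.root.FindOrAddChild(1)->FindOrAddChild(2);
  std::printf("%u allocations, %u bytes\n",
              inner->allocation_count, inner->total_size);
  return 0;
}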
diff --git a/src/allocation-tracker.h b/src/allocation-tracker.h
new file mode 100644
index 0000000..617cf90
--- /dev/null
+++ b/src/allocation-tracker.h
@@ -0,0 +1,138 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ALLOCATION_TRACKER_H_
+#define V8_ALLOCATION_TRACKER_H_
+
+namespace v8 {
+namespace internal {
+
+class HeapObjectsMap;
+
+class AllocationTraceTree;
+
+class AllocationTraceNode {
+ public:
+  AllocationTraceNode(AllocationTraceTree* tree,
+                      SnapshotObjectId shared_function_info_id);
+  ~AllocationTraceNode();
+  AllocationTraceNode* FindChild(SnapshotObjectId shared_function_info_id);
+  AllocationTraceNode* FindOrAddChild(SnapshotObjectId shared_function_info_id);
+  void AddAllocation(unsigned size);
+
+  SnapshotObjectId function_id() const { return function_id_; }
+  unsigned allocation_size() const { return total_size_; }
+  unsigned allocation_count() const { return allocation_count_; }
+  unsigned id() const { return id_; }
+  Vector<AllocationTraceNode*> children() const { return children_.ToVector(); }
+
+  void Print(int indent, AllocationTracker* tracker);
+
+ private:
+  AllocationTraceTree* tree_;
+  SnapshotObjectId function_id_;
+  unsigned total_size_;
+  unsigned allocation_count_;
+  unsigned id_;
+  List<AllocationTraceNode*> children_;
+
+  DISALLOW_COPY_AND_ASSIGN(AllocationTraceNode);
+};
+
+
+class AllocationTraceTree {
+ public:
+  AllocationTraceTree();
+  ~AllocationTraceTree();
+  AllocationTraceNode* AddPathFromEnd(const Vector<SnapshotObjectId>& path);
+  AllocationTraceNode* root() { return &root_; }
+  unsigned next_node_id() { return next_node_id_++; }
+  void Print(AllocationTracker* tracker);
+
+ private:
+  unsigned next_node_id_;
+  AllocationTraceNode root_;
+
+  DISALLOW_COPY_AND_ASSIGN(AllocationTraceTree);
+};
+
+
+class AllocationTracker {
+ public:
+  struct FunctionInfo {
+    FunctionInfo();
+    const char* name;
+    const char* script_name;
+    int script_id;
+    int line;
+    int column;
+  };
+
+  AllocationTracker(HeapObjectsMap* ids, StringsStorage* names);
+  ~AllocationTracker();
+
+  void PrepareForSerialization();
+  void NewObjectEvent(Address addr, int size);
+
+  AllocationTraceTree* trace_tree() { return &trace_tree_; }
+  HashMap* id_to_function_info() { return &id_to_function_info_; }
+  FunctionInfo* GetFunctionInfo(SnapshotObjectId id);
+
+ private:
+  void AddFunctionInfo(SharedFunctionInfo* info, SnapshotObjectId id);
+
+  class UnresolvedLocation {
+   public:
+    UnresolvedLocation(Script* script, int start, FunctionInfo* info);
+    ~UnresolvedLocation();
+    void Resolve();
+
+   private:
+    static void HandleWeakScript(v8::Isolate* isolate,
+                                 v8::Persistent<v8::Value>* obj,
+                                 void* data);
+    Handle<Script> script_;
+    int start_position_;
+    FunctionInfo* info_;
+  };
+  static void DeleteUnresolvedLocation(UnresolvedLocation** location);
+
+  static const int kMaxAllocationTraceLength = 64;
+  HeapObjectsMap* ids_;
+  StringsStorage* names_;
+  AllocationTraceTree trace_tree_;
+  SnapshotObjectId allocation_trace_buffer_[kMaxAllocationTraceLength];
+  HashMap id_to_function_info_;
+  List<UnresolvedLocation*> unresolved_locations_;
+
+  DISALLOW_COPY_AND_ASSIGN(AllocationTracker);
+};
+
+} }  // namespace v8::internal
+
+#endif  // V8_ALLOCATION_TRACKER_H_
+
diff --git a/src/api.cc b/src/api.cc
index 4a405a9..736f0b0 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -6132,8 +6132,10 @@
 
   ASSERT(byte_offset % sizeof(ElementType) == 0);
 
+  CHECK(length <= (std::numeric_limits<size_t>::max() / sizeof(ElementType)));
+  size_t byte_length = length * sizeof(ElementType);
   SetupArrayBufferView(
-      isolate, obj, buffer, byte_offset, length * sizeof(ElementType));
+      isolate, obj, buffer, byte_offset, byte_length);
 
   i::Handle<i::Object> length_object =
     isolate->factory()->NewNumberFromSize(length);
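
The CHECK added above exists because length * sizeof(ElementType) is computed in size_t and can silently wrap for a sufficiently large length; dividing the maximum value by the element size first makes the guard itself overflow-free. A standalone sketch of the same pattern (not V8 code):

#include <cstddef>
#include <cstdio>
#include <limits>

// Returns false instead of wrapping when count * elem_size would overflow
// size_t, using the same division-based guard as the CHECK above.
bool SafeByteLength(std::size_t count, std::size_t elem_size,
                    std::size_t* out_bytes) {
  if (elem_size != 0 &&
      count > std::numeric_limits<std::size_t>::max() / elem_size) {
    return false;
  }
  *out_bytes = count * elem_size;
  return true;
}

int main() {
  std::size_t bytes = 0;
  bool ok = SafeByteLength(std::numeric_limits<std::size_t>::max() / 2, 8,
                           &bytes);
  std::printf("%s\n", ok ? "fits" : "would overflow");
  return 0;
}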
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 94e114f..8db65bc 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -5850,11 +5850,13 @@
       __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
     }
 
-    // Save the resulting elements kind in type info
-    __ SmiTag(r3);
-    __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
-    __ str(r3, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
-    __ SmiUntag(r3);
+    // Save the resulting elements kind in type info. We can't just store r3
+    // in the AllocationSite::transition_info field because elements kind is
+    // restricted to a portion of the field; upper bits must be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
+    __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
+    __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5996,6 +5998,8 @@
 
   __ ldr(r3, FieldMemOperand(r3, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(r3);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
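
Both hunks rely on ElementsKindBits occupying the low bits of AllocationSite::transition_info (kShift == 0): the store side adds the packed-to-holey delta so the upper bits survive, and the load side masks the kind back out. A rough standalone sketch of that idea (hypothetical field width, not V8's actual encoding):

#include <cstdio>

// Hypothetical layout: elements kind in the low 5 bits, other flags above it.
const unsigned kKindMask = 0x1F;         // stands in for ElementsKindBits::kMask
const unsigned kPackedToHoleyDelta = 1;  // stands in for kFastElementsKindPackedToHoley

unsigned TransitionToHoley(unsigned transition_info) {
  // Adding the delta only changes the kind bits, provided packed kind + delta
  // stays under the mask (what the STATIC_ASSERTs in elements-kind.cc
  // guarantee for the real constants).
  return transition_info + kPackedToHoleyDelta;
}

unsigned ElementsKindOf(unsigned transition_info) {
  return transition_info & kKindMask;
}

int main() {
  unsigned info = (0x3u << 5) | 2u;  // upper flag bits 0x3, packed kind 2
  info = TransitionToHoley(info);
  std::printf("kind=%u, upper bits=%u\n", ElementsKindOf(info), info >> 5);
  return 0;
}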
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 025a590..a3b2a6e 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1432,10 +1432,10 @@
   __ JumpIfSmi(receiver, &slow);
   // Get the map of the object.
   __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks.  We need
-  // to do this because this generic stub does not perform map checks.
+  // Check that the receiver does not require access checks and is not observed.
+  // The generic stub does not perform map checks or handle observed objects.
   __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
+  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
   __ b(ne, &slow);
   // Check if the object is a JS array or not.
   __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
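
The change folds two map checks into a single bit test: the byte at Map::kBitFieldOffset is tested against a mask with both the access-check bit and the observed bit set, and either bit being set sends the store to the slow path. In C terms the test is roughly (standalone illustration, made-up bit positions):

#include <cstdio>

// Made-up bit positions, for illustration only.
const unsigned kIsAccessCheckNeededBit = 1u << 3;
const unsigned kIsObservedBit = 1u << 6;

bool MustTakeSlowPath(unsigned map_bit_field) {
  // One AND covers both conditions, just like the single tst instruction above.
  return (map_bit_field & (kIsAccessCheckNeededBit | kIsObservedBit)) != 0;
}

int main() {
  std::printf("%d %d %d\n",
              MustTakeSlowPath(0),
              MustTakeSlowPath(kIsObservedBit),
              MustTakeSlowPath(kIsAccessCheckNeededBit));
  return 0;
}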
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 82066a6..8bb4a08 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -858,96 +858,6 @@
 }
 
 
-void MacroAssembler::LoadNumber(Register object,
-                                LowDwVfpRegister dst,
-                                Register heap_number_map,
-                                Register scratch,
-                                Label* not_number) {
-  Label is_smi, done;
-
-  UntagAndJumpIfSmi(scratch, object, &is_smi);
-  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
-
-  vldr(dst, FieldMemOperand(object, HeapNumber::kValueOffset));
-  b(&done);
-
-  // Handle loading a double from a smi.
-  bind(&is_smi);
-  vmov(dst.high(), scratch);
-  vcvt_f64_s32(dst, dst.high());
-
-  bind(&done);
-}
-
-
-void MacroAssembler::LoadNumberAsInt32Double(Register object,
-                                             DwVfpRegister double_dst,
-                                             Register heap_number_map,
-                                             Register scratch,
-                                             LowDwVfpRegister double_scratch,
-                                             Label* not_int32) {
-  ASSERT(!scratch.is(object));
-  ASSERT(!heap_number_map.is(object) && !heap_number_map.is(scratch));
-
-  Label done, obj_is_not_smi;
-
-  UntagAndJumpIfNotSmi(scratch, object, &obj_is_not_smi);
-  vmov(double_scratch.low(), scratch);
-  vcvt_f64_s32(double_dst, double_scratch.low());
-  b(&done);
-
-  bind(&obj_is_not_smi);
-  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_int32);
-
-  // Load the number.
-  // Load the double value.
-  vldr(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset));
-
-  TestDoubleIsInt32(double_dst, double_scratch);
-  // Jump to not_int32 if the operation did not succeed.
-  b(ne, not_int32);
-
-  bind(&done);
-}
-
-
-void MacroAssembler::LoadNumberAsInt32(Register object,
-                                       Register dst,
-                                       Register heap_number_map,
-                                       Register scratch,
-                                       DwVfpRegister double_scratch0,
-                                       LowDwVfpRegister double_scratch1,
-                                       Label* not_int32) {
-  ASSERT(!dst.is(object));
-  ASSERT(!scratch.is(object));
-
-  Label done, maybe_undefined;
-
-  UntagAndJumpIfSmi(dst, object, &done);
-
-  JumpIfNotHeapNumber(object, heap_number_map, scratch, &maybe_undefined);
-
-  // Object is a heap number.
-  // Convert the floating point value to a 32-bit integer.
-  // Load the double value.
-  vldr(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset));
-
-  TryDoubleToInt32Exact(dst, double_scratch0, double_scratch1);
-  // Jump to not_int32 if the operation did not succeed.
-  b(ne, not_int32);
-  b(&done);
-
-  bind(&maybe_undefined);
-  CompareRoot(object, Heap::kUndefinedValueRootIndex);
-  b(ne, not_int32);
-  // |undefined| is truncated to 0.
-  mov(dst, Operand(Smi::FromInt(0)));
-  // Fall through.
-
-  bind(&done);
-}
-
-
 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
   if (frame_mode == BUILD_STUB_FRAME) {
     stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index a7ddc8e..88e220e 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1298,34 +1298,33 @@
 }
 
 
-void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                             Label* success,
-                                             Label* miss) {
+void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ b(success);
+    Label success;
+    __ b(&success);
     __ bind(miss);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
-void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                              Label* success,
-                                              Label* miss) {
+void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ b(success);
+    Label success;
+    __ b(&success);
     GenerateRestoreName(masm(), miss, name);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
 Register LoadStubCompiler::CallbackHandlerFrontend(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
-    Label* success,
     Handle<Object> callback) {
   Label miss;
 
@@ -1362,7 +1361,7 @@
     __ b(ne, &miss);
   }
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
   return reg;
 }
 
@@ -1468,7 +1467,7 @@
 
 void LoadStubCompiler::GenerateLoadInterceptor(
     Register holder_reg,
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> interceptor_holder,
     LookupResult* lookup,
     Handle<Name> name) {
@@ -1686,7 +1685,7 @@
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ mov(r0, Operand(argc));
   __ mov(r2, Operand(site_feedback_cell));
@@ -1718,8 +1717,12 @@
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1972,8 +1975,12 @@
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss, return_undefined, call_builtin;
   Register receiver = r1;
@@ -2539,11 +2546,23 @@
 }
 
 
+void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
+  Label success;
+  // Check that the object is a boolean.
+  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+  __ cmp(object, ip);
+  __ b(eq, &success);
+  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+  __ cmp(object, ip);
+  __ b(ne, miss);
+  __ bind(&success);
+}
+
+
 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                               Handle<JSObject> holder,
                                               Handle<Name> name,
-                                              CheckType check,
-                                              Label* success) {
+                                              CheckType check) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
@@ -2619,15 +2638,8 @@
       break;
     }
     case BOOLEAN_CHECK: {
-      Label fast;
-      // Check that the object is a boolean.
-      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
-      __ cmp(r1, ip);
-      __ b(eq, &fast);
-      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
-      __ cmp(r1, ip);
-      __ b(ne, &miss);
-      __ bind(&fast);
+      GenerateBooleanCheck(r1, &miss);
+
       // Check that the maps starting from the prototype haven't changed.
       GenerateDirectLoadGlobalFunctionPrototype(
           masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
@@ -2638,11 +2650,14 @@
     }
   }
 
-  __ b(success);
+  Label success;
+  __ b(&success);
 
   // Handle call cache miss.
   __ bind(&miss);
   GenerateMissBranch();
+
+  __ bind(&success);
 }
 
 
@@ -2671,10 +2686,7 @@
     if (!code.is_null()) return code;
   }
 
-  Label success;
-
-  CompileHandlerFrontend(object, holder, name, check, &success);
-  __ bind(&success);
+  CompileHandlerFrontend(object, holder, name, check);
   CompileHandlerBackend(function);
 
   // Return the generated code.
@@ -2785,9 +2797,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<ExecutableAccessorInfo> callback) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   // Stub never generated for non-global objects that require access checks.
   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -2813,9 +2823,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     const CallOptimization& call_optimization) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   Register values[] = { value() };
   GenerateFastApiCall(
@@ -2910,15 +2918,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> last,
     Handle<Name> name,
     Handle<JSGlobalObject> global) {
-  Label success;
+  NonexistentHandlerFrontend(object, last, name, global);
 
-  NonexistentHandlerFrontend(object, last, name, &success, global);
-
-  __ bind(&success);
   // Return undefined if maps of the full prototype chain are still the
   // same and no global property with this name contains a value.
   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3013,12 +3018,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<GlobalObject> global,
     Handle<PropertyCell> cell,
     Handle<Name> name,
     bool is_dont_delete) {
-  Label success, miss;
+  Label miss;
 
   HandlerFrontendHeader(object, receiver(), global, name, &miss);
 
@@ -3033,8 +3038,7 @@
     __ b(eq, &miss);
   }
 
-  HandlerFrontendFooter(name, &success, &miss);
-  __ bind(&success);
+  HandlerFrontendFooter(name, &miss);
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
@@ -3058,18 +3062,26 @@
     GenerateNameCheck(name, this->name(), &miss);
   }
 
-  __ JumpIfSmi(receiver(), &miss);
+  Label number_case;
+  Label* smi_target = HasHeapNumberMap(receiver_maps) ? &number_case : &miss;
+  __ JumpIfSmi(receiver(), smi_target);
+
   Register map_reg = scratch1();
 
   int receiver_count = receiver_maps->length();
   int number_of_handled_maps = 0;
   __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
+  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   for (int current = 0; current < receiver_count; ++current) {
     Handle<Map> map = receiver_maps->at(current);
     if (!map->is_deprecated()) {
       number_of_handled_maps++;
       __ mov(ip, Operand(receiver_maps->at(current)));
       __ cmp(map_reg, ip);
+      if (map.is_identical_to(heap_number_map)) {
+        ASSERT(!number_case.is_unused());
+        __ bind(&number_case);
+      }
       __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
     }
   }
diff --git a/src/builtins.cc b/src/builtins.cc
index 758967e..4077272 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -311,6 +311,7 @@
     Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
   if (!receiver->IsJSArray()) return NULL;
   JSArray* array = JSArray::cast(receiver);
+  if (array->map()->is_observed()) return NULL;
   HeapObject* elms = array->elements();
   Map* map = elms->map();
   if (map == heap->fixed_array_map()) {
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index a790179..eb3bac8 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -694,27 +694,7 @@
   HInstruction* argument = Add<HAccessArgumentsAt>(
       elements, constant_one, constant_zero);
 
-  HConstant* max_alloc_length =
-      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
-  const int initial_capacity = JSArray::kPreallocatedArrayElements;
-  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
-
-  HInstruction* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
-  IfBuilder if_builder(this);
-  if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
-                                          Token::EQ);
-  if_builder.Then();
-  Push(initial_capacity_node);  // capacity
-  Push(constant_zero);  // length
-  if_builder.Else();
-  Push(checked_arg);  // capacity
-  Push(checked_arg);  // length
-  if_builder.End();
-
-  // Figure out total size
-  HValue* length = Pop();
-  HValue* capacity = Pop();
-  return array_builder->AllocateArray(capacity, length, true);
+  return BuildAllocateArrayFromLength(array_builder, argument);
 }
 
 
@@ -725,11 +705,16 @@
   // the array because they aren't compatible with a smi array.
   // If it's a double array, no problem, and if it's fast then no
   // problem either because doubles are boxed.
+  //
+  // TODO(mvstanton): consider an instruction to memset-fill the array
+  // with zero in this case instead.
   HValue* length = GetArgumentsLength();
-  bool fill_with_hole = IsFastSmiElementsKind(kind);
+  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+      ? JSArrayBuilder::FILL_WITH_HOLE
+      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
   HValue* new_object = array_builder->AllocateArray(length,
                                                     length,
-                                                    fill_with_hole);
+                                                    fill_mode);
   HValue* elements = array_builder->GetElementsLocation();
   ASSERT(elements != NULL);
 
diff --git a/src/elements-kind.cc b/src/elements-kind.cc
index 213aa35..16f5bff 100644
--- a/src/elements-kind.cc
+++ b/src/elements-kind.cc
@@ -68,6 +68,14 @@
     fast_elements_kind_sequence[3] = FAST_HOLEY_DOUBLE_ELEMENTS;
     fast_elements_kind_sequence[4] = FAST_ELEMENTS;
     fast_elements_kind_sequence[5] = FAST_HOLEY_ELEMENTS;
+
+    // Verify that kFastElementsKindPackedToHoley is correct.
+    STATIC_ASSERT(FAST_SMI_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_SMI_ELEMENTS);
+    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_DOUBLE_ELEMENTS);
+    STATIC_ASSERT(FAST_ELEMENTS + kFastElementsKindPackedToHoley ==
+                  FAST_HOLEY_ELEMENTS);
   }
 };
 
diff --git a/src/elements-kind.h b/src/elements-kind.h
index da15192..f5280d6 100644
--- a/src/elements-kind.h
+++ b/src/elements-kind.h
@@ -77,6 +77,10 @@
 const int kFastElementsKindCount = LAST_FAST_ELEMENTS_KIND -
     FIRST_FAST_ELEMENTS_KIND + 1;
 
+// The number to add to a packed elements kind to reach a holey elements kind.
+const int kFastElementsKindPackedToHoley =
+    FAST_HOLEY_SMI_ELEMENTS - FAST_SMI_ELEMENTS;
+
 const char* ElementsKindToString(ElementsKind kind);
 void PrintElementsKind(FILE* out, ElementsKind kind);
 
diff --git a/src/factory.cc b/src/factory.cc
index 6da9a2e..01f5854 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -451,6 +451,15 @@
 }
 
 
+Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
+    int aliased_context_slot) {
+  Handle<AliasedArgumentsEntry> entry = Handle<AliasedArgumentsEntry>::cast(
+      NewStruct(ALIASED_ARGUMENTS_ENTRY_TYPE));
+  entry->set_aliased_context_slot(aliased_context_slot);
+  return entry;
+}
+
+
 Handle<DeclaredAccessorDescriptor> Factory::NewDeclaredAccessorDescriptor() {
   return Handle<DeclaredAccessorDescriptor>::cast(
       NewStruct(DECLARED_ACCESSOR_DESCRIPTOR_TYPE));
@@ -626,11 +635,12 @@
   int instance_size_delta = extra_inobject_properties * kPointerSize;
   int max_instance_size_delta =
       JSObject::kMaxInstanceSize - copy->instance_size();
-  if (instance_size_delta > max_instance_size_delta) {
+  int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2;
+  if (extra_inobject_properties > max_extra_properties) {
     // If the instance size overflows, we allocate as many properties
     // as we can as inobject properties.
     instance_size_delta = max_instance_size_delta;
-    extra_inobject_properties = max_instance_size_delta >> kPointerSizeLog2;
+    extra_inobject_properties = max_extra_properties;
   }
   // Adjust the map with the extra inobject properties.
   int inobject_properties =
diff --git a/src/factory.h b/src/factory.h
index 663f56f..92086d4 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -224,6 +224,9 @@
   // the old generation).
   Handle<Struct> NewStruct(InstanceType type);
 
+  Handle<AliasedArgumentsEntry> NewAliasedArgumentsEntry(
+      int aliased_context_slot);
+
   Handle<DeclaredAccessorDescriptor> NewDeclaredAccessorDescriptor();
 
   Handle<DeclaredAccessorInfo> NewDeclaredAccessorInfo();
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index c0016fb..46320b6 100644
--- a/src/heap-profiler.cc
+++ b/src/heap-profiler.cc
@@ -158,6 +158,7 @@
 
 void HeapProfiler::StartHeapAllocationsRecording() {
   StartHeapObjectsTracking();
+  heap()->DisableInlineAllocation();
   is_tracking_allocations_ = true;
   DropCompiledCode();
   snapshots_->UpdateHeapObjectsMap();
@@ -166,6 +167,7 @@
 
 void HeapProfiler::StopHeapAllocationsRecording() {
   StopHeapObjectsTracking();
+  heap()->EnableInlineAllocation();
   is_tracking_allocations_ = false;
   DropCompiledCode();
 }
diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc
index b3d905a..b7b7f22 100644
--- a/src/heap-snapshot-generator.cc
+++ b/src/heap-snapshot-generator.cc
@@ -29,6 +29,7 @@
 
 #include "heap-snapshot-generator-inl.h"
 
+#include "allocation-tracker.h"
 #include "code-stubs.h"
 #include "heap-profiler.h"
 #include "debug.h"
@@ -748,7 +749,8 @@
 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
     : is_tracking_objects_(false),
       names_(heap),
-      ids_(heap) {
+      ids_(heap),
+      allocation_tracker_(NULL) {
 }
 
 
@@ -758,10 +760,29 @@
 
 
 HeapSnapshotsCollection::~HeapSnapshotsCollection() {
+  delete allocation_tracker_;
   snapshots_.Iterate(DeleteHeapSnapshot);
 }
 
 
+void HeapSnapshotsCollection::StartHeapObjectsTracking() {
+  ids_.UpdateHeapObjectsMap();
+  if (allocation_tracker_ == NULL) {
+    allocation_tracker_ = new AllocationTracker(&ids_, names());
+  }
+  is_tracking_objects_ = true;
+}
+
+
+void HeapSnapshotsCollection::StopHeapObjectsTracking() {
+  ids_.StopHeapObjectsTracking();
+  if (allocation_tracker_ != NULL) {
+    delete allocation_tracker_;
+    allocation_tracker_ = NULL;
+  }
+}
+
+
 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name,
                                                    unsigned uid) {
   is_tracking_objects_ = true;  // Start watching for heap objects moves.
@@ -805,6 +826,15 @@
 }
 
 
+void HeapSnapshotsCollection::NewObjectEvent(Address addr, int size) {
+  DisallowHeapAllocation no_allocation;
+  ids_.NewObject(addr, size);
+  if (allocation_tracker_ != NULL) {
+    allocation_tracker_->NewObjectEvent(addr, size);
+  }
+}
+
+
 size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
   size_t size = sizeof(*this);
   size += names_.GetUsedMemorySize();
@@ -2645,6 +2675,10 @@
 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
 
 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
+  if (AllocationTracker* allocation_tracker =
+      snapshot_->collection()->allocation_tracker()) {
+    allocation_tracker->PrepareForSerialization();
+  }
   ASSERT(writer_ == NULL);
   writer_ = new OutputStreamWriter(stream);
   SerializeImpl();
@@ -2668,6 +2702,16 @@
   SerializeEdges();
   if (writer_->aborted()) return;
   writer_->AddString("],\n");
+
+  writer_->AddString("\"trace_function_infos\":[");
+  SerializeTraceNodeInfos();
+  if (writer_->aborted()) return;
+  writer_->AddString("],\n");
+  writer_->AddString("\"trace_tree\":[");
+  SerializeTraceTree();
+  if (writer_->aborted()) return;
+  writer_->AddString("],\n");
+
   writer_->AddString("\"strings\":[");
   SerializeStrings();
   if (writer_->aborted()) return;
@@ -2828,7 +2872,20 @@
             JSON_S("shortcut") ","
             JSON_S("weak")) ","
         JSON_S("string_or_number") ","
-        JSON_S("node"))));
+        JSON_S("node")) ","
+    JSON_S("trace_function_info_fields") ":" JSON_A(
+        JSON_S("function_id") ","
+        JSON_S("name") ","
+        JSON_S("script_name") ","
+        JSON_S("script_id") ","
+        JSON_S("line") ","
+        JSON_S("column")) ","
+    JSON_S("trace_node_fields") ":" JSON_A(
+        JSON_S("id") ","
+        JSON_S("function_id") ","
+        JSON_S("count") ","
+        JSON_S("size") ","
+        JSON_S("children"))));
 #undef JSON_S
 #undef JSON_O
 #undef JSON_A
@@ -2836,6 +2893,13 @@
   writer_->AddNumber(snapshot_->entries().length());
   writer_->AddString(",\"edge_count\":");
   writer_->AddNumber(snapshot_->edges().length());
+  writer_->AddString(",\"trace_function_count\":");
+  uint32_t count = 0;
+  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
+  if (tracker) {
+    count = tracker->id_to_function_info()->occupancy();
+  }
+  writer_->AddNumber(count);
 }
 
 
@@ -2849,6 +2913,100 @@
 }
 
 
+void HeapSnapshotJSONSerializer::SerializeTraceTree() {
+  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
+  if (!tracker) return;
+  AllocationTraceTree* traces = tracker->trace_tree();
+  SerializeTraceNode(traces->root());
+}
+
+
+void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
+  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
+  const int kBufferSize =
+      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
+      + 4 + 1 + 1;
+  EmbeddedVector<char, kBufferSize> buffer;
+  int buffer_pos = 0;
+  buffer_pos = utoa(node->id(), buffer, buffer_pos);
+  buffer[buffer_pos++] = ',';
+  buffer_pos = utoa(node->function_id(), buffer, buffer_pos);
+  buffer[buffer_pos++] = ',';
+  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
+  buffer[buffer_pos++] = ',';
+  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
+  buffer[buffer_pos++] = ',';
+  buffer[buffer_pos++] = '[';
+  buffer[buffer_pos++] = '\0';
+  writer_->AddString(buffer.start());
+
+  Vector<AllocationTraceNode*> children = node->children();
+  for (int i = 0; i < children.length(); i++) {
+    if (i > 0) {
+      writer_->AddCharacter(',');
+    }
+    SerializeTraceNode(children[i]);
+  }
+  writer_->AddCharacter(']');
+}
+
+
+// 0-based position is converted to 1-based during the serialization.
+static int SerializePosition(int position, const Vector<char>& buffer,
+                             int buffer_pos) {
+  if (position == -1) {
+    buffer[buffer_pos++] = '0';
+  } else {
+    ASSERT(position >= 0);
+    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
+  }
+  return buffer_pos;
+}
+
+
+void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
+  AllocationTracker* tracker = snapshot_->collection()->allocation_tracker();
+  if (!tracker) return;
+  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
+  const int kBufferSize =
+      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
+      + 6 + 1 + 1;
+  EmbeddedVector<char, kBufferSize> buffer;
+  HashMap* id_to_function_info = tracker->id_to_function_info();
+  bool first_entry = true;
+  for (HashMap::Entry* p = id_to_function_info->Start();
+       p != NULL;
+       p = id_to_function_info->Next(p)) {
+    SnapshotObjectId id =
+        static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key));
+    AllocationTracker::FunctionInfo* info =
+        reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value);
+    int buffer_pos = 0;
+    if (first_entry) {
+      first_entry = false;
+    } else {
+      buffer[buffer_pos++] = ',';
+    }
+    buffer_pos = utoa(id, buffer, buffer_pos);
+    buffer[buffer_pos++] = ',';
+    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
+    buffer[buffer_pos++] = ',';
+    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
+    buffer[buffer_pos++] = ',';
+    // The cast is safe because script id is a non-negative Smi.
+    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
+        buffer_pos);
+    buffer[buffer_pos++] = ',';
+    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
+    buffer[buffer_pos++] = ',';
+    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
+    buffer[buffer_pos++] = '\n';
+    buffer[buffer_pos++] = '\0';
+    writer_->AddString(buffer.start());
+  }
+}
+
+
 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
   writer_->AddCharacter('\n');
   writer_->AddCharacter('\"');
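
Taken together, the code above adds two sections to the snapshot JSON. A hypothetical fragment (all numbers invented) serialized by these functions would look like:

"trace_function_infos":[7,23,24,5,10,3
],
"trace_tree":[1,0,0,0,[2,7,2,48,[]]],

Here 7 is the function's snapshot id, 23 and 24 are string table ids for its name and script name, 5 is the script id, 10 and 3 are the 1-based line and column, and the trace tree node with id 2 records 2 allocations totalling 48 bytes attributed to that function under the root node (id 1).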
diff --git a/src/heap-snapshot-generator.h b/src/heap-snapshot-generator.h
index f78db55..b8199ba 100644
--- a/src/heap-snapshot-generator.h
+++ b/src/heap-snapshot-generator.h
@@ -33,6 +33,8 @@
 namespace v8 {
 namespace internal {
 
+class AllocationTracker;
+class AllocationTraceNode;
 class HeapEntry;
 class HeapSnapshot;
 
@@ -296,8 +298,8 @@
   SnapshotObjectId PushHeapObjectsStats(OutputStream* stream) {
     return ids_.PushHeapObjectsStats(stream);
   }
-  void StartHeapObjectsTracking() { is_tracking_objects_ = true; }
-  void StopHeapObjectsTracking() { ids_.StopHeapObjectsTracking(); }
+  void StartHeapObjectsTracking();
+  void StopHeapObjectsTracking();
 
   HeapSnapshot* NewSnapshot(const char* name, unsigned uid);
   void SnapshotGenerationFinished(HeapSnapshot* snapshot);
@@ -305,6 +307,7 @@
   void RemoveSnapshot(HeapSnapshot* snapshot);
 
   StringsStorage* names() { return &names_; }
+  AllocationTracker* allocation_tracker() { return allocation_tracker_; }
 
   SnapshotObjectId FindObjectId(Address object_addr) {
     return ids_.FindEntry(object_addr);
@@ -316,7 +319,7 @@
   void ObjectMoveEvent(Address from, Address to, int size) {
     ids_.MoveObject(from, to, size);
   }
-  void NewObjectEvent(Address addr, int size) { ids_.NewObject(addr, size); }
+  void NewObjectEvent(Address addr, int size);
   void UpdateObjectSizeEvent(Address addr, int size) {
     ids_.UpdateObjectSize(addr, size);
   }
@@ -335,6 +338,7 @@
   StringsStorage names_;
   // Mapping from HeapObject addresses to objects' uids.
   HeapObjectsMap ids_;
+  AllocationTracker* allocation_tracker_;
 
   DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
 };
@@ -675,6 +679,9 @@
   void SerializeNode(HeapEntry* entry);
   void SerializeNodes();
   void SerializeSnapshot();
+  void SerializeTraceTree();
+  void SerializeTraceNode(AllocationTraceNode* node);
+  void SerializeTraceNodeInfos();
   void SerializeString(const unsigned char* s);
   void SerializeStrings();
 
diff --git a/src/heap.cc b/src/heap.cc
index b5d03bc..66eeadb 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -114,6 +114,7 @@
       amount_of_external_allocated_memory_(0),
       amount_of_external_allocated_memory_at_last_global_gc_(0),
       old_gen_exhausted_(false),
+      inline_allocation_disabled_(false),
       store_buffer_rebuilder_(store_buffer()),
       hidden_string_(NULL),
       gc_safe_size_of_old_object_(NULL),
@@ -938,6 +939,8 @@
 
 
 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+  if (start_new_space_size == 0) return;
+
   double survival_rate =
       (static_cast<double>(young_survivors_after_last_gc_) * 100) /
       start_new_space_size;
@@ -4598,8 +4601,7 @@
   // advice
   Map* initial_map = constructor->initial_map();
 
-  Smi* smi = Smi::cast(allocation_site->transition_info());
-  ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+  ElementsKind to_kind = allocation_site->GetElementsKind();
   AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
   if (to_kind != initial_map->elements_kind()) {
     MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind);
@@ -6581,6 +6583,32 @@
 }
 
 
+void Heap::EnableInlineAllocation() {
+  ASSERT(inline_allocation_disabled_);
+  inline_allocation_disabled_ = false;
+
+  // Update inline allocation limit for new space.
+  new_space()->UpdateInlineAllocationLimit(0);
+}
+
+
+void Heap::DisableInlineAllocation() {
+  ASSERT(!inline_allocation_disabled_);
+  inline_allocation_disabled_ = true;
+
+  // Update inline allocation limit for new space.
+  new_space()->UpdateInlineAllocationLimit(0);
+
+  // Update inline allocation limit for old spaces.
+  PagedSpaces spaces(this);
+  for (PagedSpace* space = spaces.next();
+       space != NULL;
+       space = spaces.next()) {
+    space->EmptyAllocationInfo();
+  }
+}
+
+
 V8_DECLARE_ONCE(initialize_gc_once);
 
 static void InitializeGCOnce() {
diff --git a/src/heap.h b/src/heap.h
index a0c85e5..ef1e415 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1547,6 +1547,13 @@
     return Min(limit, halfway_to_the_max);
   }
 
+  // Indicates whether inline bump-pointer allocation has been disabled.
+  bool inline_allocation_disabled() { return inline_allocation_disabled_; }
+
+  // Switch whether inline bump-pointer allocation should be used.
+  void EnableInlineAllocation();
+  void DisableInlineAllocation();
+
   // Implements the corresponding V8 API function.
   bool IdleNotification(int hint);
 
@@ -1993,6 +2000,10 @@
   // last GC.
   bool old_gen_exhausted_;
 
+  // Indicates that inline bump-pointer allocation has been globally disabled
+  // for all spaces. This is used to disable allocations in generated code.
+  bool inline_allocation_disabled_;
+
   // Weak list heads, threaded through the objects.
   // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 33586f3..67bea00 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -1909,6 +1909,48 @@
 }
 
 
+
+HValue* HGraphBuilder::BuildAllocateArrayFromLength(
+    JSArrayBuilder* array_builder,
+    HValue* length_argument) {
+  if (length_argument->IsConstant() &&
+      HConstant::cast(length_argument)->HasSmiValue()) {
+    int array_length = HConstant::cast(length_argument)->Integer32Value();
+    HValue* new_object = array_length == 0
+        ? array_builder->AllocateEmptyArray()
+        : array_builder->AllocateArray(length_argument, length_argument);
+    return new_object;
+  }
+
+  HValue* constant_zero = graph()->GetConstant0();
+  HConstant* max_alloc_length =
+      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
+  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
+                                                   max_alloc_length);
+  IfBuilder if_builder(this);
+  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
+                                          Token::EQ);
+  if_builder.Then();
+  const int initial_capacity = JSArray::kPreallocatedArrayElements;
+  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
+  Push(initial_capacity_node);  // capacity
+  Push(constant_zero);          // length
+  if_builder.Else();
+  if (!(top_info()->IsStub()) &&
+      IsFastPackedElementsKind(array_builder->kind())) {
+    // We'll come back later with better (holey) feedback.
+    if_builder.Deopt("Holey array despite packed elements_kind feedback");
+  }
+  Push(checked_length);         // capacity
+  Push(checked_length);         // length
+  if_builder.End();
+
+  // Figure out total size
+  HValue* length = Pop();
+  HValue* capacity = Pop();
+  return array_builder->AllocateArray(capacity, length);
+}
+
 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                              HValue* capacity) {
   int elements_size;
@@ -2097,19 +2139,18 @@
       : Add<HConstant>(nan_double);
 
   // Special loop unfolding case
-  static const int kLoopUnfoldLimit = 4;
-  bool unfold_loop = false;
-  int initial_capacity = JSArray::kPreallocatedArrayElements;
-  if (from->ActualValue()->IsConstant() && to->ActualValue()->IsConstant() &&
-      initial_capacity <= kLoopUnfoldLimit) {
+  static const int kLoopUnfoldLimit = 8;
+  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
+  int initial_capacity = -1;
+  if (from->ActualValue()->IsConstant() && to->ActualValue()->IsConstant()) {
     HConstant* constant_from = HConstant::cast(from->ActualValue());
     HConstant* constant_to = HConstant::cast(to->ActualValue());
 
     if (constant_from->HasInteger32Value() &&
         constant_from->Integer32Value() == 0 &&
         constant_to->HasInteger32Value() &&
-        constant_to->Integer32Value() == initial_capacity) {
-      unfold_loop = true;
+        constant_to->Integer32Value() <= kLoopUnfoldLimit) {
+      initial_capacity = constant_to->Integer32Value();
     }
   }
 
@@ -2119,7 +2160,7 @@
     elements_kind = FAST_HOLEY_ELEMENTS;
   }
 
-  if (unfold_loop) {
+  if (initial_capacity >= 0) {
     for (int i = 0; i < initial_capacity; i++) {
       HInstruction* key = Add<HConstant>(i);
       Add<HStoreKeyed>(elements, key, hole, elements_kind);
@@ -2378,6 +2419,13 @@
 
 
 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
+  if (!builder()->top_info()->IsStub()) {
+    // A constant map is fine.
+    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
+                    builder()->isolate());
+    return builder()->Add<HConstant>(map);
+  }
+
   if (kind_ == GetInitialFastElementsKind()) {
     // No need for a context lookup if the kind_ matches the initial
     // map, because we can just load the map in that case.
@@ -2420,12 +2468,14 @@
 
   HInstruction* elements_size_value =
       builder()->Add<HConstant>(elements_size());
-  HInstruction* mul = builder()->Add<HMul>(length_node, elements_size_value);
-  mul->ClearFlag(HValue::kCanOverflow);
-
+  HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
+                                    length_node, elements_size_value);
+  builder()->AddInstruction(mul);
   HInstruction* base = builder()->Add<HConstant>(base_size);
-  HInstruction* total_size = builder()->Add<HAdd>(base, mul);
+  HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
+                                       base, mul);
   total_size->ClearFlag(HValue::kCanOverflow);
+  builder()->AddInstruction(total_size);
   return total_size;
 }
 
@@ -2449,23 +2499,22 @@
   HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
   return AllocateArray(size_in_bytes,
                        capacity,
-                       builder()->graph()->GetConstant0(),
-                       true);
+                       builder()->graph()->GetConstant0());
 }
 
 
 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
                                                      HValue* length_field,
-                                                     bool fill_with_hole) {
+                                                     FillMode fill_mode) {
   HValue* size_in_bytes = EstablishAllocationSize(capacity);
-  return AllocateArray(size_in_bytes, capacity, length_field, fill_with_hole);
+  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
 }
 
 
 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                                      HValue* capacity,
                                                      HValue* length_field,
-                                                     bool fill_with_hole) {
+                                                     FillMode fill_mode) {
   // These HForceRepresentations are because we store these as fields in the
   // objects we construct, and an int32-to-smi HChange could deopt. Accept
   // the deopt possibility now, before allocation occurs.
@@ -2499,7 +2548,7 @@
   // Initialize the elements
   builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
 
-  if (fill_with_hole) {
+  if (fill_mode == FILL_WITH_HOLE) {
     builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                          graph()->GetConstant0(), capacity);
   }
@@ -7535,6 +7584,71 @@
 }
 
 
+void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
+  NoObservableSideEffectsScope no_effects(this);
+
+  int argument_count = expr->arguments()->length();
+  // We should at least have the constructor on the expression stack.
+  HValue* constructor = environment()->ExpressionStackAt(argument_count);
+
+  ElementsKind kind = expr->elements_kind();
+  Handle<Cell> cell = expr->allocation_info_cell();
+  AllocationSite* site = AllocationSite::cast(cell->value());
+
+  // Register on the site for deoptimization if the cell value changes.
+  site->AddDependentCompilationInfo(AllocationSite::TRANSITIONS, top_info());
+  HInstruction* cell_instruction = Add<HConstant>(cell);
+
+  // In the single constant argument case, we may have to adjust elements kind
+  // to avoid creating a packed non-empty array.
+  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
+    HValue* argument = environment()->Top();
+    if (argument->IsConstant()) {
+      HConstant* constant_argument = HConstant::cast(argument);
+      ASSERT(constant_argument->HasSmiValue());
+      int constant_array_size = constant_argument->Integer32Value();
+      if (constant_array_size != 0) {
+        kind = GetHoleyElementsKind(kind);
+      }
+    }
+  }
+
+  // Build the array.
+  JSArrayBuilder array_builder(this,
+                               kind,
+                               cell_instruction,
+                               constructor,
+                               DISABLE_ALLOCATION_SITES);
+  HValue* new_object;
+  if (argument_count == 0) {
+    new_object = array_builder.AllocateEmptyArray();
+  } else if (argument_count == 1) {
+    HValue* argument = environment()->Top();
+    new_object = BuildAllocateArrayFromLength(&array_builder, argument);
+  } else {
+    HValue* length = Add<HConstant>(argument_count);
+    // Smi arrays need to initialize array elements with the hole because
+    // bailout could occur if the arguments don't fit in a smi.
+    //
+    // TODO(mvstanton): If all the arguments are constants in smi range, then
+    // we could use DONT_FILL_WITH_HOLE and save a few instructions.
+    JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+        ? JSArrayBuilder::FILL_WITH_HOLE
+        : JSArrayBuilder::DONT_FILL_WITH_HOLE;
+    new_object = array_builder.AllocateArray(length, length, fill_mode);
+    HValue* elements = array_builder.GetElementsLocation();
+    for (int i = 0; i < argument_count; i++) {
+      HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
+      HValue* constant_i = Add<HConstant>(i);
+      Add<HStoreKeyed>(elements, constant_i, value, kind);
+    }
+  }
+
+  Drop(argument_count + 1);  // drop constructor and args.
+  ast_context()->ReturnValue(new_object);
+}
+
+
 // Checks whether allocation using the given constructor can be inlined.
 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
   return constructor->has_initial_map() &&
@@ -7544,6 +7658,50 @@
 }
 
 
+bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
+  bool inline_ok = false;
+  Handle<JSFunction> caller = current_info()->closure();
+  Handle<JSFunction> target(isolate()->global_context()->array_function(),
+                            isolate());
+  int argument_count = expr->arguments()->length();
+  // We should have the function plus array arguments on the environment stack.
+  ASSERT(environment()->length() >= (argument_count + 1));
+  Handle<Cell> cell = expr->allocation_info_cell();
+  AllocationSite* site = AllocationSite::cast(cell->value());
+  if (site->CanInlineCall()) {
+    // We also want to avoid inlining in certain 1 argument scenarios.
+    if (argument_count == 1) {
+      HValue* argument = Top();
+      if (argument->IsConstant()) {
+        // Do not inline if the constant length argument is not a smi or
+        // outside the valid range for a fast array.
+        HConstant* constant_argument = HConstant::cast(argument);
+        if (constant_argument->HasSmiValue()) {
+          int value = constant_argument->Integer32Value();
+          inline_ok = value >= 0 &&
+              value < JSObject::kInitialMaxFastElementArray;
+          if (!inline_ok) {
+            TraceInline(target, caller,
+                        "Length outside of valid array range");
+          }
+        }
+      } else {
+        inline_ok = true;
+      }
+    } else {
+      inline_ok = true;
+    }
+  } else {
+    TraceInline(target, caller, "AllocationSite requested no inlining.");
+  }
+
+  if (inline_ok) {
+    TraceInline(target, caller, NULL);
+  }
+  return inline_ok;
+}
+
+
 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
@@ -7552,14 +7710,15 @@
   int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
   Factory* factory = isolate()->factory();
 
+  // The constructor function is on the stack in the unoptimized code
+  // during evaluation of the arguments.
+  CHECK_ALIVE(VisitForValue(expr->expression()));
+  HValue* function = Top();
+  CHECK_ALIVE(VisitExpressions(expr->arguments()));
+
   if (FLAG_inline_construct &&
       expr->IsMonomorphic() &&
       IsAllocationInlineable(expr->target())) {
-    // The constructor function is on the stack in the unoptimized code
-    // during evaluation of the arguments.
-    CHECK_ALIVE(VisitForValue(expr->expression()));
-    HValue* function = Top();
-    CHECK_ALIVE(VisitExpressions(expr->arguments()));
     Handle<JSFunction> constructor = expr->target();
     HValue* check = Add<HCheckValue>(function, constructor);
 
@@ -7646,19 +7805,24 @@
     // argument to the construct call.
     Handle<JSFunction> array_function(
         isolate()->global_context()->array_function(), isolate());
-    CHECK_ALIVE(VisitArgument(expr->expression()));
-    HValue* constructor = HPushArgument::cast(Top())->argument();
-    CHECK_ALIVE(VisitArgumentList(expr->arguments()));
-    HBinaryCall* call;
-    if (expr->target().is_identical_to(array_function)) {
-      Handle<Cell> cell = expr->allocation_info_cell();
-      Add<HCheckValue>(constructor, array_function);
-      call = New<HCallNewArray>(constructor, argument_count,
-                                cell, expr->elements_kind());
-    } else {
-      call = New<HCallNew>(constructor, argument_count);
+    bool use_call_new_array = expr->target().is_identical_to(array_function);
+    Handle<Cell> cell = expr->allocation_info_cell();
+    if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
+      // Verify we are still calling the array function for our native context.
+      Add<HCheckValue>(function, array_function);
+      BuildInlinedCallNewArray(expr);
+      return;
     }
-    Drop(argument_count);
+
+    HBinaryCall* call;
+    if (use_call_new_array) {
+      Add<HCheckValue>(function, array_function);
+      call = New<HCallNewArray>(function, argument_count, cell,
+                                expr->elements_kind());
+    } else {
+      call = New<HCallNew>(function, argument_count);
+    }
+    PreProcessCall(call);
     return ast_context()->ReturnInstruction(call, expr->id());
   }
 }
@@ -8195,14 +8359,27 @@
   // Special case for string addition here.
   if (op == Token::ADD &&
       (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
+    // Validate type feedback for left argument.
     if (left_type->Is(Type::String())) {
       IfBuilder if_isstring(this);
       if_isstring.If<HIsStringAndBranch>(left);
       if_isstring.Then();
       if_isstring.ElseDeopt("Expected string for LHS of binary operation");
-    } else if (left_type->Is(Type::Number())) {
+    }
+
+    // Validate type feedback for right argument.
+    if (right_type->Is(Type::String())) {
+      IfBuilder if_isstring(this);
+      if_isstring.If<HIsStringAndBranch>(right);
+      if_isstring.Then();
+      if_isstring.ElseDeopt("Expected string for RHS of binary operation");
+    }
+
+    // Convert left argument as necessary.
+    if (left_type->Is(Type::Number())) {
+      ASSERT(right_type->Is(Type::String()));
       left = BuildNumberToString(left, left_type);
-    } else {
+    } else if (!left_type->Is(Type::String())) {
       ASSERT(right_type->Is(Type::String()));
       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
       Add<HPushArgument>(left);
@@ -8210,14 +8387,11 @@
       return NewUncasted<HInvokeFunction>(function, 2);
     }
 
-    if (right_type->Is(Type::String())) {
-      IfBuilder if_isstring(this);
-      if_isstring.If<HIsStringAndBranch>(right);
-      if_isstring.Then();
-      if_isstring.ElseDeopt("Expected string for RHS of binary operation");
-    } else if (right_type->Is(Type::Number())) {
+    // Convert right argument as necessary.
+    if (right_type->Is(Type::Number())) {
+      ASSERT(left_type->Is(Type::String()));
       right = BuildNumberToString(right, right_type);
-    } else {
+    } else if (!right_type->Is(Type::String())) {
       ASSERT(left_type->Is(Type::String()));
       HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
       Add<HPushArgument>(left);
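The hunk above reorders the string-addition path so that both operands carrying String type feedback are validated before any NumberToString conversion or builtin call is emitted; previously a generic LHS returned through the STRING_ADD_RIGHT builtin before the RHS check ran. A minimal plain-C++ sketch of the reordered control flow follows; the enum, helpers and return strings are stand-ins for illustration, not Hydrogen IR:

#include <cassert>
#include <string>

// Sketch only: feedback checks (asserts standing in for deopts) run for both
// operands first, then conversions or early builtin calls.
enum Type { kStringType, kNumberType, kGenericType };

static std::string AddWithFeedback(Type left_type, Type right_type,
                                   bool left_is_string, bool right_is_string,
                                   std::string left, std::string right) {
  if (left_type == kStringType) assert(left_is_string);    // deopt check, LHS
  if (right_type == kStringType) assert(right_is_string);  // deopt check, RHS
  if (left_type == kNumberType) left = "<NumberToString>";
  else if (left_type != kStringType) return "<STRING_ADD_RIGHT builtin>";
  if (right_type == kNumberType) right = "<NumberToString>";
  else if (right_type != kStringType) return "<STRING_ADD_LEFT builtin>";
  return left + right;
}

int main() {
  // Generic LHS, string-typed RHS: the RHS check now runs before the builtin
  // path is taken, which is exactly the ordering the hunk establishes.
  assert(AddWithFeedback(kGenericType, kStringType, false, true, "a", "b") ==
         "<STRING_ADD_RIGHT builtin>");
  return 0;
}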
diff --git a/src/hydrogen.h b/src/hydrogen.h
index b3cb8ff..7987a97 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -1585,9 +1585,16 @@
                    ElementsKind kind,
                    HValue* constructor_function);
 
+    enum FillMode {
+      DONT_FILL_WITH_HOLE,
+      FILL_WITH_HOLE
+    };
+
+    ElementsKind kind() { return kind_; }
+
     HValue* AllocateEmptyArray();
     HValue* AllocateArray(HValue* capacity, HValue* length_field,
-                          bool fill_with_hole);
+                          FillMode fill_mode = FILL_WITH_HOLE);
     HValue* GetElementsLocation() { return elements_location_; }
 
    private:
@@ -1607,7 +1614,8 @@
     HValue* EstablishEmptyArrayAllocationSize();
     HValue* EstablishAllocationSize(HValue* length_node);
     HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity,
-                          HValue* length_field,  bool fill_with_hole);
+                          HValue* length_field,
+                          FillMode fill_mode = FILL_WITH_HOLE);
 
     HGraphBuilder* builder_;
     ElementsKind kind_;
@@ -1617,6 +1625,9 @@
     HInnerAllocatedObject* elements_location_;
   };
 
+  HValue* BuildAllocateArrayFromLength(JSArrayBuilder* array_builder,
+                                       HValue* length_argument);
+
   HValue* BuildAllocateElements(ElementsKind kind,
                                 HValue* capacity);
 
@@ -2101,6 +2112,9 @@
                                        SmallMapList* types,
                                        Handle<String> name);
 
+  bool IsCallNewArrayInlineable(CallNew* expr);
+  void BuildInlinedCallNewArray(CallNew* expr);
+
   class PropertyAccessInfo {
    public:
     PropertyAccessInfo(Isolate* isolate, Handle<Map> map, Handle<String> name)
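The header change above swaps AllocateArray's bool fill_with_hole parameter for the two-value FillMode enum, defaulted to FILL_WITH_HOLE, so call sites read as intent rather than a bare true/false. A small self-contained sketch of the same pattern; the AllocateArray below is a stand-in, not the JSArrayBuilder method:

#include <cstdio>

enum FillMode {
  DONT_FILL_WITH_HOLE,
  FILL_WITH_HOLE
};

// Defaulting to FILL_WITH_HOLE keeps the conservative behaviour for callers
// that cannot guarantee every element gets stored before it is read.
static void AllocateArray(int capacity, FillMode fill_mode = FILL_WITH_HOLE) {
  std::printf("capacity=%d fill_with_hole=%d\n", capacity,
              fill_mode == FILL_WITH_HOLE ? 1 : 0);
}

int main() {
  AllocateArray(4);                        // conservative default
  AllocateArray(4, DONT_FILL_WITH_HOLE);   // self-documenting, unlike 'false'
  return 0;
}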
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index adc2d50..a1aa022 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -5785,10 +5785,12 @@
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
 
-    // Save the resulting elements kind in type info
-    __ SmiTag(edx);
-    __ mov(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset), edx);
-    __ SmiUntag(edx);
+    // Save the resulting elements kind in type info. We can't just store edx
+    // in the AllocationSite::transition_info field because the elements kind is
+    // restricted to a portion of the field; the upper bits need to be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ add(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset),
+           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5929,8 +5931,11 @@
       masm->isolate()->factory()->allocation_site_map()));
   __ j(not_equal, &no_info);
 
+  // Only look at the lower 16 bits of the transition info.
   __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(edx);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
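Both the packed-to-holey transition and the read-side masking above depend on the elements kind sitting in the low bits of AllocationSite::transition_info, which is what the STATIC_ASSERT on ElementsKindBits::kShift pins down. The sketch below models only that arithmetic; the 5-bit field width and the flag bits are assumptions for illustration, and Smi tagging is ignored:

#include <cassert>
#include <stdint.h>

static const uint32_t kElementsKindMask = 0x1f;  // assumed width, low bits only
static const uint32_t kPackedToHoley = 1;        // a holey kind follows its packed kind

static uint32_t ElementsKindOf(uint32_t transition_info) {
  return transition_info & kElementsKindMask;
}

int main() {
  uint32_t info = (0x7u << 8) | 2;  // upper bits: unrelated state; low bits: a packed kind
  // Adding the small constant to the whole field only changes the kind bits,
  // because a packed kind incremented by one never carries out of the field.
  info += kPackedToHoley;
  assert(ElementsKindOf(info) == 3);  // the holey counterpart
  assert((info >> 8) == 0x7u);        // unrelated upper bits untouched
  return 0;
}

This is also why the read side masks with ElementsKindBits::kMask before dispatching: the same field can carry other state in its upper bits without confusing the array stubs.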
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 0b7c4a8..dab9dd7 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -874,10 +874,10 @@
   __ JumpIfSmi(edx, &slow);
   // Get the map from the receiver.
   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks.  We need
-  // to do this because this generic stub does not perform map checks.
+  // Check that the receiver does not require access checks and is not observed.
+  // The generic stub does not perform map checks or handle observed objects.
   __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
-            1 << Map::kIsAccessCheckNeeded);
+            1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
   __ j(not_zero, &slow);
   // Check that the key is a smi.
   __ JumpIfNotSmi(ecx, &slow);
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 23e2398..b839333 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1267,34 +1267,33 @@
 }
 
 
-void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                             Label* success,
-                                             Label* miss) {
+void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ jmp(success);
+    Label success;
+    __ jmp(&success);
     __ bind(miss);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
-void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                              Label* success,
-                                              Label* miss) {
+void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ jmp(success);
+    Label success;
+    __ jmp(&success);
     GenerateRestoreName(masm(), miss, name);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
 Register LoadStubCompiler::CallbackHandlerFrontend(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
-    Label* success,
     Handle<Object> callback) {
   Label miss;
 
@@ -1344,7 +1343,7 @@
     __ j(not_equal, &miss);
   }
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
   return reg;
 }
 
@@ -1450,7 +1449,7 @@
 
 void LoadStubCompiler::GenerateLoadInterceptor(
     Register holder_reg,
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> interceptor_holder,
     LookupResult* lookup,
     Handle<Name> name) {
@@ -1705,7 +1704,7 @@
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ mov(eax, Immediate(argc));
   __ mov(ebx, site_feedback_cell);
@@ -1737,8 +1736,10 @@
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) {
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
     return Handle<Code>::null();
   }
 
@@ -1996,8 +1997,10 @@
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) {
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
     return Handle<Code>::null();
   }
 
@@ -2613,11 +2616,21 @@
 }
 
 
+void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
+  Label success;
+  // Check that the object is a boolean.
+  __ cmp(object, factory()->true_value());
+  __ j(equal, &success);
+  __ cmp(object, factory()->false_value());
+  __ j(not_equal, miss);
+  __ bind(&success);
+}
+
+
 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                               Handle<JSObject> holder,
                                               Handle<Name> name,
-                                              CheckType check,
-                                              Label* success) {
+                                              CheckType check) {
   // ----------- S t a t e -------------
   //  -- ecx                 : name
   //  -- esp[0]              : return address
@@ -2696,13 +2709,7 @@
       break;
     }
     case BOOLEAN_CHECK: {
-      Label fast;
-      // Check that the object is a boolean.
-      __ cmp(edx, factory()->true_value());
-      __ j(equal, &fast);
-      __ cmp(edx, factory()->false_value());
-      __ j(not_equal, &miss);
-      __ bind(&fast);
+      GenerateBooleanCheck(edx, &miss);
       // Check that the maps starting from the prototype haven't changed.
       GenerateDirectLoadGlobalFunctionPrototype(
           masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
@@ -2713,11 +2720,14 @@
     }
   }
 
-  __ jmp(success);
+  Label success;
+  __ jmp(&success);
 
   // Handle call cache miss.
   __ bind(&miss);
   GenerateMissBranch();
+
+  __ bind(&success);
 }
 
 
@@ -2747,10 +2757,7 @@
     if (!code.is_null()) return code;
   }
 
-  Label success;
-
-  CompileHandlerFrontend(object, holder, name, check, &success);
-  __ bind(&success);
+  CompileHandlerFrontend(object, holder, name, check);
   CompileHandlerBackend(function);
 
   // Return the generated code.
@@ -2885,9 +2892,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<ExecutableAccessorInfo> callback) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   __ pop(scratch1());  // remove the return address
   __ push(receiver());
@@ -2911,9 +2916,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     const CallOptimization& call_optimization) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   Register values[] = { value() };
   GenerateFastApiCall(
@@ -3020,15 +3023,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> last,
     Handle<Name> name,
     Handle<JSGlobalObject> global) {
-  Label success;
+  NonexistentHandlerFrontend(object, last, name, global);
 
-  NonexistentHandlerFrontend(object, last, name, &success, global);
-
-  __ bind(&success);
   // Return undefined if maps of the full prototype chain are still the
   // same and no global property with this name contains a value.
   __ mov(eax, isolate()->factory()->undefined_value());
@@ -3118,12 +3118,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<GlobalObject> global,
     Handle<PropertyCell> cell,
     Handle<Name> name,
     bool is_dont_delete) {
-  Label success, miss;
+  Label miss;
 
   HandlerFrontendHeader(object, receiver(), global, name, &miss);
   // Get the value from the cell.
@@ -3143,8 +3143,7 @@
     __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
   }
 
-  HandlerFrontendFooter(name, &success, &miss);
-  __ bind(&success);
+  HandlerFrontendFooter(name, &miss);
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->named_load_global_stub(), 1);
@@ -3168,16 +3167,24 @@
     GenerateNameCheck(name, this->name(), &miss);
   }
 
-  __ JumpIfSmi(receiver(), &miss);
+  Label number_case;
+  Label* smi_target = HasHeapNumberMap(receiver_maps) ? &number_case : &miss;
+  __ JumpIfSmi(receiver(), smi_target);
+
   Register map_reg = scratch1();
   __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
   int receiver_count = receiver_maps->length();
   int number_of_handled_maps = 0;
+  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   for (int current = 0; current < receiver_count; ++current) {
     Handle<Map> map = receiver_maps->at(current);
     if (!map->is_deprecated()) {
       number_of_handled_maps++;
       __ cmp(map_reg, map);
+      if (map.is_identical_to(heap_number_map)) {
+        ASSERT(!number_case.is_unused());
+        __ bind(&number_case);
+      }
       __ j(equal, handlers->at(current));
     }
   }
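The polymorphic load stub above no longer sends every Smi receiver straight to the miss handler: when the compiled handlers include one keyed on the heap number map, a Smi jumps to that handler's label instead. A schematic C++ model of that dispatch decision; the types are stand-ins for the generated compare-and-jump sequence:

#include <cassert>
#include <string>
#include <vector>

struct HandledMap { int map_id; bool is_heap_number_map; std::string handler; };

static const int kNoMap = -1;  // a Smi receiver has no map of its own

static std::string Dispatch(int receiver_map_id,
                            const std::vector<HandledMap>& handled) {
  for (size_t i = 0; i < handled.size(); ++i) {
    // Smis can only be served by the heap number handler, if one was compiled in.
    if (receiver_map_id == kNoMap && handled[i].is_heap_number_map)
      return handled[i].handler;
    if (handled[i].map_id == receiver_map_id) return handled[i].handler;
  }
  return "miss";  // TailCallBuiltin(Miss) in the generated code
}

int main() {
  std::vector<HandledMap> handled;
  handled.push_back(HandledMap{1, false, "string_handler"});
  handled.push_back(HandledMap{2, true, "heap_number_handler"});
  assert(Dispatch(kNoMap, handled) == "heap_number_handler");  // Smi receiver
  assert(Dispatch(3, handled) == "miss");                      // unknown map
  return 0;
}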
diff --git a/src/ic-inl.h b/src/ic-inl.h
index 06cbf2e..d1c31c0 100644
--- a/src/ic-inl.h
+++ b/src/ic-inl.h
@@ -100,8 +100,7 @@
 }
 
 
-InlineCacheHolderFlag IC::GetCodeCacheForObject(Object* object,
-                                                JSObject* holder) {
+InlineCacheHolderFlag IC::GetCodeCacheForObject(Object* object) {
   if (object->IsJSObject()) return OWN_MAP;
 
   // If the object is a value, we use the prototype map for the cache.
@@ -111,13 +110,13 @@
 }
 
 
-JSObject* IC::GetCodeCacheHolder(Isolate* isolate,
+HeapObject* IC::GetCodeCacheHolder(Isolate* isolate,
                                  Object* object,
                                  InlineCacheHolderFlag holder) {
-  Object* map_owner =
-      holder == OWN_MAP ? object : object->GetPrototype(isolate);
-  ASSERT(map_owner->IsJSObject());
-  return JSObject::cast(map_owner);
+  if (object->IsSmi()) holder = PROTOTYPE_MAP;
+  Object* map_owner = holder == OWN_MAP
+      ? object : object->GetPrototype(isolate);
+  return HeapObject::cast(map_owner);
 }
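GetCodeCacheHolder now returns a HeapObject and forces Smi receivers onto the prototype holder, which is what allows handlers for primitive receivers (including the string length load mentioned in the ic.h TODO further down) to be cached at all. A rough model of the holder choice with stand-in types, not the real object hierarchy:

#include <cassert>
#include <cstring>

enum HolderFlag { OWN_MAP, PROTOTYPE_MAP };

struct CacheHolder { const char* name; };

// A JS object can hold its own code cache; a Smi has no map of its own, so its
// handlers have to live on the prototype's map instead.
struct Receiver {
  bool is_smi;
  CacheHolder self;
  CacheHolder prototype;
};

static CacheHolder GetCodeCacheHolder(const Receiver& r, HolderFlag holder) {
  if (r.is_smi) holder = PROTOTYPE_MAP;
  return holder == OWN_MAP ? r.self : r.prototype;
}

int main() {
  Receiver smi = {true, {"(none)"}, {"Number.prototype map"}};
  Receiver obj = {false, {"own map"}, {"Object.prototype map"}};
  assert(std::strcmp(GetCodeCacheHolder(smi, OWN_MAP).name,
                     "Number.prototype map") == 0);  // forced to the prototype
  assert(std::strcmp(GetCodeCacheHolder(obj, OWN_MAP).name, "own map") == 0);
  return 0;
}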
 
 
diff --git a/src/ic.cc b/src/ic.cc
index 640b188..5cc54d2 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -949,7 +949,7 @@
 }
 
 
-bool IC::UpdatePolymorphicIC(Handle<HeapObject> receiver,
+bool IC::UpdatePolymorphicIC(Handle<Object> receiver,
                              Handle<String> name,
                              Handle<Code> code) {
   if (!code->is_handler()) return false;
@@ -958,7 +958,7 @@
 
   int number_of_valid_maps;
   int handler_to_overwrite = -1;
-  Handle<Map> new_receiver_map(receiver->map());
+  Handle<Map> new_receiver_map(receiver->GetMarkerMap(isolate()));
 
   target()->FindAllMaps(&receiver_maps);
   int number_of_maps = receiver_maps.length();
@@ -1000,12 +1000,12 @@
 }
 
 
-void IC::UpdateMonomorphicIC(Handle<HeapObject> receiver,
+void IC::UpdateMonomorphicIC(Handle<Object> receiver,
                              Handle<Code> handler,
                              Handle<String> name) {
   if (!handler->is_handler()) return set_target(*handler);
   Handle<Code> ic = isolate()->stub_cache()->ComputeMonomorphicIC(
-      receiver, handler, name, strict_mode());
+      name, receiver, handler, strict_mode());
   set_target(*ic);
 }
 
@@ -1035,42 +1035,39 @@
 }
 
 
-void IC::PatchCache(Handle<HeapObject> receiver,
+void IC::PatchCache(Handle<Object> object,
                     Handle<String> name,
                     Handle<Code> code) {
   switch (state()) {
     case UNINITIALIZED:
     case PREMONOMORPHIC:
     case MONOMORPHIC_PROTOTYPE_FAILURE:
-      UpdateMonomorphicIC(receiver, code, name);
+      UpdateMonomorphicIC(object, code, name);
       break;
-    case MONOMORPHIC:
+    case MONOMORPHIC: {
       // For now, call stubs are allowed to rewrite to the same stub. This
       // happens e.g., when the field does not contain a function.
       ASSERT(target()->is_call_stub() ||
              target()->is_keyed_call_stub() ||
              !target().is_identical_to(code));
-      if (!target()->is_keyed_stub()) {
-        bool is_same_handler = false;
-        Code* old_handler = target()->FindFirstHandler();
-        is_same_handler = old_handler == *code;
-
-        if (is_same_handler &&
-            IsTransitionedMapOfMonomorphicTarget(receiver->map())) {
-          UpdateMonomorphicIC(receiver, code, name);
-          break;
-        }
+      Code* old_handler = target()->FindFirstHandler();
+      if (old_handler == *code &&
+          IsTransitionedMapOfMonomorphicTarget(
+              object->GetMarkerMap(isolate()))) {
+        UpdateMonomorphicIC(object, code, name);
+        break;
       }
       // Fall through.
+    }
     case POLYMORPHIC:
       if (!target()->is_keyed_stub()) {
-        if (UpdatePolymorphicIC(receiver, name, code)) break;
+        if (UpdatePolymorphicIC(object, name, code)) break;
         CopyICToMegamorphicCache(name);
       }
       set_target(*megamorphic_stub());
       // Fall through.
     case MEGAMORPHIC:
-      UpdateMegamorphicCache(receiver->map(), *name, *code);
+      UpdateMegamorphicCache(object->GetMarkerMap(isolate()), *name, *code);
       break;
     case DEBUG_STUB:
       break;
@@ -1097,13 +1094,6 @@
 void LoadIC::UpdateCaches(LookupResult* lookup,
                           Handle<Object> object,
                           Handle<String> name) {
-  // TODO(verwaest): It would be nice to support loading fields from smis as
-  // well. For now just fail to update the cache.
-  if (!object->IsHeapObject()) return;
-
-  Handle<HeapObject> receiver = Handle<HeapObject>::cast(object);
-
-  Handle<Code> code;
   if (state() == UNINITIALIZED) {
     // This is the first time we execute this inline cache.
     // Set the target to the pre monomorphic stub to delay
@@ -1111,27 +1101,23 @@
     set_target(*pre_monomorphic_stub());
     TRACE_IC("LoadIC", name);
     return;
-  } else if (!lookup->IsCacheable()) {
-    // Bail out if the result is not cacheable.
-    code = slow_stub();
-  } else if (object->IsString() &&
-             name->Equals(isolate()->heap()->length_string())) {
-    int length_index = String::kLengthOffset / kPointerSize;
-    code = SimpleFieldLoad(length_index);
-  } else if (!object->IsJSObject()) {
-    // TODO(jkummerow): It would be nice to support non-JSObjects in
-    // ComputeLoadHandler, then we wouldn't need to go generic here.
-    code = slow_stub();
-  } else if (!lookup->IsProperty()) {
-    code = kind() == Code::LOAD_IC
-        ? isolate()->stub_cache()->ComputeLoadNonexistent(
-              name, Handle<JSObject>::cast(receiver))
-        : slow_stub();
-  } else {
-    code = ComputeHandler(lookup, Handle<JSObject>::cast(receiver), name);
   }
 
-  PatchCache(receiver, name, code);
+  Handle<Code> code;
+  if (!lookup->IsCacheable()) {
+    // Bail out if the result is not cacheable.
+    code = slow_stub();
+  } else if (!lookup->IsProperty()) {
+    if (kind() == Code::LOAD_IC) {
+      code = isolate()->stub_cache()->ComputeLoadNonexistent(name, object);
+    } else {
+      code = slow_stub();
+    }
+  } else {
+    code = ComputeHandler(lookup, object, name);
+  }
+
+  PatchCache(object, name, code);
   TRACE_IC("LoadIC", name);
 }
 
@@ -1144,18 +1130,22 @@
 
 
 Handle<Code> IC::ComputeHandler(LookupResult* lookup,
-                                Handle<JSObject> receiver,
+                                Handle<Object> object,
                                 Handle<String> name,
                                 Handle<Object> value) {
+  InlineCacheHolderFlag cache_holder = GetCodeCacheForObject(*object);
+  Handle<HeapObject> stub_holder(GetCodeCacheHolder(
+      isolate(), *object, cache_holder));
+
   Handle<Code> code = isolate()->stub_cache()->FindHandler(
-      name, receiver, kind());
+      name, stub_holder, kind(), cache_holder, strict_mode());
   if (!code.is_null()) return code;
 
-  code = CompileHandler(lookup, receiver, name, value);
+  code = CompileHandler(lookup, object, name, value, cache_holder);
   ASSERT(code->is_handler());
 
   if (code->type() != Code::NORMAL) {
-    HeapObject::UpdateMapCodeCache(receiver, name, code);
+    HeapObject::UpdateMapCodeCache(stub_holder, name, code);
   }
 
   return code;
@@ -1163,29 +1153,35 @@
 
 
 Handle<Code> LoadIC::CompileHandler(LookupResult* lookup,
-                                    Handle<JSObject> receiver,
+                                    Handle<Object> object,
                                     Handle<String> name,
-                                    Handle<Object> unused) {
+                                    Handle<Object> unused,
+                                    InlineCacheHolderFlag cache_holder) {
+  if (object->IsString() && name->Equals(isolate()->heap()->length_string())) {
+    int length_index = String::kLengthOffset / kPointerSize;
+    return SimpleFieldLoad(length_index);
+  }
+
   Handle<JSObject> holder(lookup->holder());
-  LoadStubCompiler compiler(isolate(), kind());
+  LoadStubCompiler compiler(isolate(), cache_holder, kind());
 
   switch (lookup->type()) {
     case FIELD: {
       PropertyIndex field = lookup->GetFieldIndex();
-      if (receiver.is_identical_to(holder)) {
+      if (object.is_identical_to(holder)) {
         return SimpleFieldLoad(field.translate(holder),
                                field.is_inobject(holder),
                                lookup->representation());
       }
       return compiler.CompileLoadField(
-          receiver, holder, name, field, lookup->representation());
+          object, holder, name, field, lookup->representation());
     }
     case CONSTANT: {
       Handle<Object> constant(lookup->GetConstant(), isolate());
       // TODO(2803): Don't compute a stub for cons strings because they cannot
       // be embedded into code.
       if (constant->IsConsString()) break;
-      return compiler.CompileLoadConstant(receiver, holder, name, constant);
+      return compiler.CompileLoadConstant(object, holder, name, constant);
     }
     case NORMAL:
       if (kind() != Code::LOAD_IC) break;
@@ -1194,26 +1190,31 @@
         Handle<PropertyCell> cell(
             global->GetPropertyCell(lookup), isolate());
         Handle<Code> code = compiler.CompileLoadGlobal(
-            receiver, global, cell, name, lookup->IsDontDelete());
+            object, global, cell, name, lookup->IsDontDelete());
         // TODO(verwaest): Move caching of these NORMAL stubs outside as well.
-        HeapObject::UpdateMapCodeCache(receiver, name, code);
+        Handle<HeapObject> stub_holder(GetCodeCacheHolder(
+            isolate(), *object, cache_holder));
+        HeapObject::UpdateMapCodeCache(stub_holder, name, code);
         return code;
       }
       // There is only one shared stub for loading normalized
       // properties. It does not traverse the prototype chain, so the
-      // property must be found in the receiver for the stub to be
+      // property must be found in the object for the stub to be
       // applicable.
-      if (!holder.is_identical_to(receiver)) break;
+      if (!object.is_identical_to(holder)) break;
       return isolate()->builtins()->LoadIC_Normal();
     case CALLBACKS: {
       // Use simple field loads for some well-known callback properties.
       int object_offset;
-      Handle<Map> map(receiver->map());
-      if (Accessors::IsJSObjectFieldAccessor(map, name, &object_offset)) {
-        PropertyIndex index =
-            PropertyIndex::NewHeaderIndex(object_offset / kPointerSize);
-        return compiler.CompileLoadField(
-            receiver, receiver, name, index, Representation::Tagged());
+      if (object->IsJSObject()) {
+        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        Handle<Map> map(receiver->map());
+        if (Accessors::IsJSObjectFieldAccessor(map, name, &object_offset)) {
+          PropertyIndex index =
+              PropertyIndex::NewHeaderIndex(object_offset / kPointerSize);
+          return compiler.CompileLoadField(
+              receiver, receiver, name, index, Representation::Tagged());
+        }
       }
 
       Handle<Object> callback(lookup->GetCallbackObject(), isolate());
@@ -1221,8 +1222,8 @@
         Handle<ExecutableAccessorInfo> info =
             Handle<ExecutableAccessorInfo>::cast(callback);
         if (v8::ToCData<Address>(info->getter()) == 0) break;
-        if (!info->IsCompatibleReceiver(*receiver)) break;
-        return compiler.CompileLoadCallback(receiver, holder, name, info);
+        if (!info->IsCompatibleReceiver(*object)) break;
+        return compiler.CompileLoadCallback(object, holder, name, info);
       } else if (callback->IsAccessorPair()) {
         Handle<Object> getter(Handle<AccessorPair>::cast(callback)->getter(),
                               isolate());
@@ -1230,13 +1231,20 @@
         if (holder->IsGlobalObject()) break;
         if (!holder->HasFastProperties()) break;
         Handle<JSFunction> function = Handle<JSFunction>::cast(getter);
+        if (!object->IsJSObject() &&
+            !function->IsBuiltin() &&
+            function->shared()->is_classic_mode()) {
+          // Calling non-strict non-builtins with a value as the receiver
+          // requires boxing.
+          break;
+        }
         CallOptimization call_optimization(function);
         if (call_optimization.is_simple_api_call() &&
-            call_optimization.IsCompatibleReceiver(*receiver)) {
+            call_optimization.IsCompatibleReceiver(*object)) {
           return compiler.CompileLoadCallback(
-              receiver, holder, name, call_optimization);
+              object, holder, name, call_optimization);
         }
-        return compiler.CompileLoadViaGetter(receiver, holder, name, function);
+        return compiler.CompileLoadViaGetter(object, holder, name, function);
       }
       // TODO(dcarney): Handle correctly.
       if (callback->IsDeclaredAccessorInfo()) break;
@@ -1246,7 +1254,7 @@
     }
     case INTERCEPTOR:
       ASSERT(HasInterceptorGetter(*holder));
-      return compiler.CompileLoadInterceptor(receiver, holder, name);
+      return compiler.CompileLoadInterceptor(object, holder, name);
     default:
       break;
   }
@@ -1338,9 +1346,14 @@
 }
 
 
-MaybeObject* KeyedLoadIC::Load(Handle<Object> object,
-                               Handle<Object> key,
-                               ICMissMode miss_mode) {
+MaybeObject* KeyedLoadIC::LoadForceGeneric(Handle<Object> object,
+                                           Handle<Object> key) {
+  set_target(*generic_stub());
+  return Runtime::GetObjectPropertyOrFail(isolate(), object, key);
+}
+
+
+MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) {
   if (MigrateDeprecated(object)) {
     return Runtime::GetObjectPropertyOrFail(isolate(), object, key);
   }
@@ -1357,20 +1370,18 @@
     if (maybe_object->IsFailure()) return maybe_object;
   } else if (FLAG_use_ic && !object->IsAccessCheckNeeded()) {
     ASSERT(!object->IsJSGlobalProxy());
-    if (miss_mode != MISS_FORCE_GENERIC) {
-      if (object->IsString() && key->IsNumber()) {
-        if (state() == UNINITIALIZED) stub = string_stub();
-      } else if (object->IsJSObject()) {
-        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
-        if (receiver->elements()->map() ==
-            isolate()->heap()->non_strict_arguments_elements_map()) {
-          stub = non_strict_arguments_stub();
-        } else if (receiver->HasIndexedInterceptor()) {
-          stub = indexed_interceptor_stub();
-        } else if (!key->ToSmi()->IsFailure() &&
-                   (!target().is_identical_to(non_strict_arguments_stub()))) {
-          stub = LoadElementStub(receiver);
-        }
+    if (object->IsString() && key->IsNumber()) {
+      if (state() == UNINITIALIZED) stub = string_stub();
+    } else if (object->IsJSObject()) {
+      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+      if (receiver->elements()->map() ==
+          isolate()->heap()->non_strict_arguments_elements_map()) {
+        stub = non_strict_arguments_stub();
+      } else if (receiver->HasIndexedInterceptor()) {
+        stub = indexed_interceptor_stub();
+      } else if (!key->ToSmi()->IsFailure() &&
+                 (!target().is_identical_to(non_strict_arguments_stub()))) {
+        stub = LoadElementStub(receiver);
       }
     }
   }
@@ -1582,9 +1593,14 @@
 
 
 Handle<Code> StoreIC::CompileHandler(LookupResult* lookup,
-                                     Handle<JSObject> receiver,
+                                     Handle<Object> object,
                                      Handle<String> name,
-                                     Handle<Object> value) {
+                                     Handle<Object> value,
+                                     InlineCacheHolderFlag cache_holder) {
+  ASSERT(cache_holder == OWN_MAP);
+  // This is currently guaranteed by checks in StoreIC::Store.
+  Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+
   Handle<JSObject> holder(lookup->holder());
   StoreStubCompiler compiler(isolate(), strict_mode(), kind());
   switch (lookup->type()) {
@@ -1915,10 +1931,23 @@
 }
 
 
+MaybeObject* KeyedStoreIC::StoreForceGeneric(Handle<Object> object,
+                                             Handle<Object> key,
+                                             Handle<Object> value) {
+  set_target(*generic_stub());
+  Handle<Object> result = Runtime::SetObjectProperty(isolate(), object,
+                                                     key,
+                                                     value,
+                                                     NONE,
+                                                     strict_mode());
+  RETURN_IF_EMPTY_HANDLE(isolate(), result);
+  return *result;
+}
+
+
 MaybeObject* KeyedStoreIC::Store(Handle<Object> object,
                                  Handle<Object> key,
-                                 Handle<Object> value,
-                                 ICMissMode miss_mode) {
+                                 Handle<Object> value) {
   if (MigrateDeprecated(object)) {
     Handle<Object> result = Runtime::SetObjectProperty(isolate(), object,
                                                        key,
@@ -1957,24 +1986,22 @@
     if (use_ic) {
       ASSERT(!object->IsJSGlobalProxy());
 
-      if (miss_mode != MISS_FORCE_GENERIC) {
-        if (object->IsJSObject()) {
-          Handle<JSObject> receiver = Handle<JSObject>::cast(object);
-          bool key_is_smi_like = key->IsSmi() || !key->ToSmi()->IsFailure();
-          if (receiver->elements()->map() ==
-              isolate()->heap()->non_strict_arguments_elements_map()) {
-            stub = non_strict_arguments_stub();
-          } else if (key_is_smi_like &&
-                     !(target().is_identical_to(non_strict_arguments_stub()))) {
-            // We should go generic if receiver isn't a dictionary, but our
-            // prototype chain does have dictionary elements. This ensures that
-            // other non-dictionary receivers in the polymorphic case benefit
-            // from fast path keyed stores.
-            if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
-              KeyedAccessStoreMode store_mode =
-                  GetStoreMode(receiver, key, value);
-              stub = StoreElementStub(receiver, store_mode);
-            }
+      if (object->IsJSObject()) {
+        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+        bool key_is_smi_like = key->IsSmi() || !key->ToSmi()->IsFailure();
+        if (receiver->elements()->map() ==
+            isolate()->heap()->non_strict_arguments_elements_map()) {
+          stub = non_strict_arguments_stub();
+        } else if (key_is_smi_like &&
+                   !(target().is_identical_to(non_strict_arguments_stub()))) {
+          // We should go generic if receiver isn't a dictionary, but our
+          // prototype chain does have dictionary elements. This ensures that
+          // other non-dictionary receivers in the polymorphic case benefit
+          // from fast path keyed stores.
+          if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
+            KeyedAccessStoreMode store_mode =
+                GetStoreMode(receiver, key, value);
+            stub = StoreElementStub(receiver, store_mode);
           }
         }
       }
@@ -2073,7 +2100,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Load(receiver, key, MISS);
+  return ic.Load(receiver, key);
 }
 
 
@@ -2084,7 +2111,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Load(receiver, key, MISS);
+  return ic.Load(receiver, key);
 }
 
 
@@ -2095,7 +2122,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Load(receiver, key, MISS_FORCE_GENERIC);
+  return ic.LoadForceGeneric(receiver, key);
 }
 
 
@@ -2204,7 +2231,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Store(receiver, key, args.at<Object>(2), MISS);
+  return ic.Store(receiver, key, args.at<Object>(2));
 }
 
 
@@ -2215,7 +2242,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Store(receiver, key, args.at<Object>(2), MISS);
+  return ic.Store(receiver, key, args.at<Object>(2));
 }
 
 
@@ -2260,7 +2287,7 @@
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   ic.UpdateState(receiver, key);
-  return ic.Store(receiver, key, args.at<Object>(2), MISS_FORCE_GENERIC);
+  return ic.StoreForceGeneric(receiver, key, args.at<Object>(2));
 }
 
 
@@ -2547,7 +2574,7 @@
 }
 
 
-void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
+Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
   HandleScope scope(isolate());
   State previous_left, previous_right, previous_state;
   ICCompareStub::DecodeMinorKey(target()->stub_info(), &previous_left,
@@ -2561,7 +2588,8 @@
     stub.set_known_map(
         Handle<Map>(Handle<JSObject>::cast(x)->map(), isolate()));
   }
-  set_target(*stub.GetCode(isolate()));
+  Handle<Code> new_target = stub.GetCode(isolate());
+  set_target(*new_target);
 
 #ifdef DEBUG
   if (FLAG_trace_ic) {
@@ -2583,6 +2611,8 @@
   if (previous_state == UNINITIALIZED) {
     PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
   }
+
+  return *new_target;
 }
 
 
@@ -2591,8 +2621,7 @@
   HandleScope scope(isolate);
   ASSERT(args.length() == 3);
   CompareIC ic(isolate, static_cast<Token::Value>(args.smi_at(2)));
-  ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1));
-  return ic.raw_target();
+  return ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1));
 }
 
 
@@ -2703,9 +2732,8 @@
 }
 
 
-MaybeObject* ToBooleanIC::ToBoolean(Handle<Object> object,
-                                    Code::ExtraICState extra_ic_state) {
-  ToBooleanStub stub(extra_ic_state);
+MaybeObject* ToBooleanIC::ToBoolean(Handle<Object> object) {
+  ToBooleanStub stub(target()->extended_extra_ic_state());
   bool to_boolean_value = stub.UpdateStatus(object);
   Handle<Code> code = stub.GetCode(isolate());
   set_target(*code);
@@ -2718,8 +2746,7 @@
   HandleScope scope(isolate);
   Handle<Object> object = args.at<Object>(0);
   ToBooleanIC ic(isolate);
-  Code::ExtraICState extra_ic_state = ic.target()->extended_extra_ic_state();
-  return ic.ToBoolean(object, extra_ic_state);
+  return ic.ToBoolean(object);
 }
 
 
diff --git a/src/ic.h b/src/ic.h
index fde4bc7..bffb290 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -94,10 +94,6 @@
   IC(FrameDepth depth, Isolate* isolate);
   virtual ~IC() {}
 
-  // Get the call-site target; used for determining the state.
-  Handle<Code> target() const { return target_; }
-  Code* raw_target() const { return GetTargetAtAddress(address()); }
-
   State state() const { return state_; }
   inline Address address() const;
 
@@ -130,13 +126,29 @@
     return ComputeMode() == RelocInfo::CODE_TARGET_CONTEXT;
   }
 
+#ifdef DEBUG
+  bool IsLoadStub() {
+    return target()->is_load_stub() || target()->is_keyed_load_stub();
+  }
+
+  bool IsStoreStub() {
+    return target()->is_store_stub() || target()->is_keyed_store_stub();
+  }
+
+  bool IsCallStub() {
+    return target()->is_call_stub() || target()->is_keyed_call_stub();
+  }
+#endif
+
   // Determines which map must be used for keeping the code stub.
   // These methods should not be called with undefined or null.
-  static inline InlineCacheHolderFlag GetCodeCacheForObject(Object* object,
-                                                            JSObject* holder);
-  static inline JSObject* GetCodeCacheHolder(Isolate* isolate,
-                                             Object* object,
-                                             InlineCacheHolderFlag holder);
+  static inline InlineCacheHolderFlag GetCodeCacheForObject(Object* object);
+  // TODO(verwaest): This currently returns a HeapObject* rather than JSObject*
+  // because ICs for loading the length of strings are stored on the string map
+  // directly, rather than on the JSObject-typed prototype.
+  static inline HeapObject* GetCodeCacheHolder(Isolate* isolate,
+                                               Object* object,
+                                               InlineCacheHolderFlag holder);
 
   static bool IsCleared(Code* code) {
     InlineCacheState state = code->ic_state();
@@ -144,6 +156,9 @@
   }
 
  protected:
+  // Get the call-site target; used for determining the state.
+  Handle<Code> target() const { return target_; }
+
   Address fp() const { return fp_; }
   Address pc() const { return *pc_address_; }
   Isolate* isolate() const { return isolate_; }
@@ -180,27 +195,28 @@
 
   // Compute the handler either by compiling or by retrieving a cached version.
   Handle<Code> ComputeHandler(LookupResult* lookup,
-                              Handle<JSObject> receiver,
+                              Handle<Object> object,
                               Handle<String> name,
                               Handle<Object> value = Handle<Code>::null());
   virtual Handle<Code> CompileHandler(LookupResult* lookup,
-                                      Handle<JSObject> receiver,
+                                      Handle<Object> object,
                                       Handle<String> name,
-                                      Handle<Object> value) {
+                                      Handle<Object> value,
+                                      InlineCacheHolderFlag cache_holder) {
     UNREACHABLE();
     return Handle<Code>::null();
   }
-  void UpdateMonomorphicIC(Handle<HeapObject> receiver,
+  void UpdateMonomorphicIC(Handle<Object> receiver,
                            Handle<Code> handler,
                            Handle<String> name);
 
-  bool UpdatePolymorphicIC(Handle<HeapObject> receiver,
+  bool UpdatePolymorphicIC(Handle<Object> receiver,
                            Handle<String> name,
                            Handle<Code> code);
 
   void CopyICToMegamorphicCache(Handle<String> name);
   bool IsTransitionedMapOfMonomorphicTarget(Map* receiver_map);
-  void PatchCache(Handle<HeapObject> receiver,
+  void PatchCache(Handle<Object> object,
                   Handle<String> name,
                   Handle<Code> code);
   virtual void UpdateMegamorphicCache(Map* map, Name* name, Code* code);
@@ -226,6 +242,8 @@
   void TryRemoveInvalidHandlers(Handle<Map> map, Handle<String> name);
 
  private:
+  Code* raw_target() const { return GetTargetAtAddress(address()); }
+
   // Frame pointer for the frame that uses (calls) the IC.
   Address fp_;
 
@@ -388,7 +406,7 @@
 class LoadIC: public IC {
  public:
   explicit LoadIC(FrameDepth depth, Isolate* isolate) : IC(depth, isolate) {
-    ASSERT(target()->is_load_stub() || target()->is_keyed_load_stub());
+    ASSERT(IsLoadStub());
   }
 
   // Code generator routines.
@@ -422,9 +440,10 @@
                     Handle<String> name);
 
   virtual Handle<Code> CompileHandler(LookupResult* lookup,
-                                      Handle<JSObject> receiver,
+                                      Handle<Object> object,
                                       Handle<String> name,
-                                      Handle<Object> unused);
+                                      Handle<Object> unused,
+                                      InlineCacheHolderFlag cache_holder);
 
  private:
   // Stub accessors.
@@ -464,9 +483,11 @@
     ASSERT(target()->is_keyed_load_stub());
   }
 
+  MUST_USE_RESULT MaybeObject* LoadForceGeneric(Handle<Object> object,
+                                                Handle<Object> key);
+
   MUST_USE_RESULT MaybeObject* Load(Handle<Object> object,
-                                    Handle<Object> key,
-                                    ICMissMode force_generic);
+                                    Handle<Object> key);
 
   // Code generator routines.
   static void GenerateMiss(MacroAssembler* masm, ICMissMode force_generic);
@@ -538,7 +559,7 @@
   StoreIC(FrameDepth depth, Isolate* isolate)
       : IC(depth, isolate),
         strict_mode_(Code::GetStrictMode(target()->extra_ic_state())) {
-    ASSERT(target()->is_store_stub() || target()->is_keyed_store_stub());
+    ASSERT(IsStoreStub());
   }
 
   virtual StrictModeFlag strict_mode() const { return strict_mode_; }
@@ -617,9 +638,10 @@
                     Handle<String> name,
                     Handle<Object> value);
   virtual Handle<Code> CompileHandler(LookupResult* lookup,
-                                      Handle<JSObject> receiver,
+                                      Handle<Object> object,
                                       Handle<String> name,
-                                      Handle<Object> value);
+                                      Handle<Object> value,
+                                      InlineCacheHolderFlag cache_holder);
 
  private:
   void set_target(Code* code) {
@@ -665,10 +687,12 @@
     ASSERT(target()->is_keyed_store_stub());
   }
 
+  MUST_USE_RESULT MaybeObject* StoreForceGeneric(Handle<Object> object,
+                                                 Handle<Object> name,
+                                                 Handle<Object> value);
   MUST_USE_RESULT MaybeObject* Store(Handle<Object> object,
                                      Handle<Object> name,
-                                     Handle<Object> value,
-                                     ICMissMode force_generic);
+                                     Handle<Object> value);
 
   // Code generators for stub routines.  Only called once at startup.
   static void GenerateInitialize(MacroAssembler* masm) {
@@ -821,7 +845,7 @@
       : IC(EXTRA_CALL_FRAME, isolate), op_(op) { }
 
   // Update the inline cache for the given operands.
-  void UpdateCaches(Handle<Object> x, Handle<Object> y);
+  Code* UpdateCaches(Handle<Object> x, Handle<Object> y);
 
 
   // Factory method for getting an uninitialized compare stub.
@@ -874,7 +898,7 @@
  public:
   explicit ToBooleanIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
 
-  MaybeObject* ToBoolean(Handle<Object> object, Code::ExtraICState state);
+  MaybeObject* ToBoolean(Handle<Object> object);
 };
 
 
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 1982c64..77b5dee 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -6005,11 +6005,14 @@
       __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset));
     }
 
-    // Save the resulting elements kind in type info
-    __ SmiTag(a3);
-    __ lw(t1, FieldMemOperand(a2, Cell::kValueOffset));
-    __ sw(a3, FieldMemOperand(t1, AllocationSite::kTransitionInfoOffset));
-    __ SmiUntag(a3);
+    // Save the resulting elements kind in type info. We can't just store a3
+    // in the AllocationSite::transition_info field because the elements kind is
+    // restricted to a portion of the field; the upper bits need to be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ lw(t0, FieldMemOperand(t1, AllocationSite::kTransitionInfoOffset));
+    __ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
+    __ sw(t0, FieldMemOperand(t1, AllocationSite::kTransitionInfoOffset));
+
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -6151,6 +6154,8 @@
 
   __ lw(a3, FieldMemOperand(a3, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(a3);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index 0fe044a..c7e1a2a 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -1354,10 +1354,11 @@
   __ JumpIfSmi(receiver, &slow);
   // Get the map of the object.
   __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks.  We need
-  // to do this because this generic stub does not perform map checks.
+  // Check that the receiver does not require access checks and is not observed.
+  // The generic stub does not perform map checks or handle observed objects.
   __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
+  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded |
+                         1 << Map::kIsObserved));
   __ Branch(&slow, ne, t0, Operand(zero_reg));
   // Check if the object is a JS array or not.
   __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 7b85688..295e9c0 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -4512,116 +4512,6 @@
 }
 
 
-void MacroAssembler::LoadNumber(Register object,
-                                FPURegister dst,
-                                Register heap_number_map,
-                                Register scratch,
-                                Label* not_number) {
-  Label is_smi, done;
-
-  UntagAndJumpIfSmi(scratch, object, &is_smi);
-  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
-
-  ldc1(dst, FieldMemOperand(object, HeapNumber::kValueOffset));
-  Branch(&done);
-
-  bind(&is_smi);
-  mtc1(scratch, dst);
-  cvt_d_w(dst, dst);
-
-  bind(&done);
-}
-
-
-void MacroAssembler::LoadNumberAsInt32Double(Register object,
-                                             DoubleRegister double_dst,
-                                             Register heap_number_map,
-                                             Register scratch1,
-                                             Register scratch2,
-                                             FPURegister double_scratch,
-                                             Label* not_int32) {
-  ASSERT(!scratch1.is(object) && !scratch2.is(object));
-  ASSERT(!scratch1.is(scratch2));
-  ASSERT(!heap_number_map.is(object) &&
-         !heap_number_map.is(scratch1) &&
-         !heap_number_map.is(scratch2));
-
-  Label done, obj_is_not_smi;
-
-  UntagAndJumpIfNotSmi(scratch1, object, &obj_is_not_smi);
-  mtc1(scratch1, double_scratch);
-  cvt_d_w(double_dst, double_scratch);
-  Branch(&done);
-
-  bind(&obj_is_not_smi);
-  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
-
-  // Load the number.
-  // Load the double value.
-  ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset));
-
-  Register except_flag = scratch2;
-  EmitFPUTruncate(kRoundToZero,
-                  scratch1,
-                  double_dst,
-                  at,
-                  double_scratch,
-                  except_flag,
-                  kCheckForInexactConversion);
-
-  // Jump to not_int32 if the operation did not succeed.
-  Branch(not_int32, ne, except_flag, Operand(zero_reg));
-  bind(&done);
-}
-
-
-void MacroAssembler::LoadNumberAsInt32(Register object,
-                                       Register dst,
-                                       Register heap_number_map,
-                                       Register scratch1,
-                                       Register scratch2,
-                                       FPURegister double_scratch0,
-                                       FPURegister double_scratch1,
-                                       Label* not_int32) {
-  ASSERT(!dst.is(object));
-  ASSERT(!scratch1.is(object) && !scratch2.is(object));
-  ASSERT(!scratch1.is(scratch2));
-
-  Label done, maybe_undefined;
-
-  UntagAndJumpIfSmi(dst, object, &done);
-
-  JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);
-
-  // Object is a heap number.
-  // Convert the floating point value to a 32-bit integer.
-  // Load the double value.
-  ldc1(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset));
-
-  Register except_flag = scratch2;
-  EmitFPUTruncate(kRoundToZero,
-                  dst,
-                  double_scratch0,
-                  scratch1,
-                  double_scratch1,
-                  except_flag,
-                  kCheckForInexactConversion);
-
-  // Jump to not_int32 if the operation did not succeed.
-  Branch(not_int32, ne, except_flag, Operand(zero_reg));
-  Branch(&done);
-
-  bind(&maybe_undefined);
-  LoadRoot(at, Heap::kUndefinedValueRootIndex);
-  Branch(not_int32, ne, object, Operand(at));
-  // |undefined| is truncated to 0.
-  li(dst, Operand(Smi::FromInt(0)));
-  // Fall through.
-
-  bind(&done);
-}
-
-
 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
   if (frame_mode == BUILD_STUB_FRAME) {
     Push(ra, fp, cp);
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 27be2d4..25edc6d 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -1287,34 +1287,33 @@
 }
 
 
-void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                             Label* success,
-                                             Label* miss) {
+void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ Branch(success);
+    Label success;
+    __ Branch(&success);
     __ bind(miss);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
-void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                              Label* success,
-                                              Label* miss) {
+void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ b(success);
+    Label success;
+    __ Branch(&success);
     GenerateRestoreName(masm(), miss, name);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
 Register LoadStubCompiler::CallbackHandlerFrontend(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
-    Label* success,
     Handle<Object> callback) {
   Label miss;
 
@@ -1350,7 +1349,7 @@
     __ Branch(&miss, ne, scratch2(), Operand(callback));
   }
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
   return reg;
 }
 
@@ -1460,7 +1459,7 @@
 
 void LoadStubCompiler::GenerateLoadInterceptor(
     Register holder_reg,
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> interceptor_holder,
     LookupResult* lookup,
     Handle<Name> name) {
@@ -1673,7 +1672,7 @@
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ li(a0, Operand(argc));
   __ li(a2, Operand(site_feedback_cell));
@@ -1705,8 +1704,12 @@
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss;
 
@@ -1960,8 +1963,12 @@
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss, return_undefined, call_builtin;
   Register receiver = a1;
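
Both hunks above add the same guard to the specialized Array push/pop call stubs: observed receivers now bail out to the regular call path, since only the generic runtime path emits Object.observe change records. A minimal standalone model of that guard (hypothetical types, not the actual stub-cache API):

#include <cstdio>

struct Map      { bool is_observed; };
struct Receiver { bool is_js_array; Map map; };

// Returns true when a specialized array stub may be compiled for this receiver.
bool CanUseSpecializedArrayStub(const Receiver& receiver, bool has_cell) {
  if (!receiver.is_js_array) return false;     // not an array: regular call
  if (has_cell) return false;                  // global function cell: regular call
  if (receiver.map.is_observed) return false;  // observed: must emit change records
  return true;
}

int main() {
  Receiver observed_array = { true, { true } };
  Receiver plain_array    = { true, { false } };
  std::printf("observed array -> %s\n",
              CanUseSpecializedArrayStub(observed_array, false) ? "stub" : "generic");
  std::printf("plain array    -> %s\n",
              CanUseSpecializedArrayStub(plain_array, false) ? "stub" : "generic");
}
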
@@ -2550,11 +2557,21 @@
 }
 
 
+void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
+  Label success;
+  // Check that the object is a boolean.
+  __ LoadRoot(at, Heap::kTrueValueRootIndex);
+  __ Branch(&success, eq, object, Operand(at));
+  __ LoadRoot(at, Heap::kFalseValueRootIndex);
+  __ Branch(miss, ne, object, Operand(at));
+  __ bind(&success);
+}
+
+
 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                               Handle<JSObject> holder,
                                               Handle<Name> name,
-                                              CheckType check,
-                                              Label* success) {
+                                              CheckType check) {
   // ----------- S t a t e -------------
   //  -- a2    : name
   //  -- ra    : return address
@@ -2630,13 +2647,8 @@
       break;
     }
     case BOOLEAN_CHECK: {
-      Label fast;
-      // Check that the object is a boolean.
-      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
-      __ Branch(&fast, eq, a1, Operand(t0));
-      __ LoadRoot(t0, Heap::kFalseValueRootIndex);
-      __ Branch(&miss, ne, a1, Operand(t0));
-      __ bind(&fast);
+      GenerateBooleanCheck(a1, &miss);
+
       // Check that the maps starting from the prototype haven't changed.
       GenerateDirectLoadGlobalFunctionPrototype(
           masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
@@ -2647,12 +2659,15 @@
     }
   }
 
-  __ jmp(success);
+  Label success;
+  __ Branch(&success);
 
   // Handle call cache miss.
   __ bind(&miss);
 
   GenerateMissBranch();
+
+  __ bind(&success);
 }
 
 
@@ -2681,10 +2696,7 @@
     if (!code.is_null()) return code;
   }
 
-  Label success;
-
-  CompileHandlerFrontend(object, holder, name, check, &success);
-  __ bind(&success);
+  CompileHandlerFrontend(object, holder, name, check);
   CompileHandlerBackend(function);
 
   // Return the generated code.
@@ -2798,9 +2810,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<ExecutableAccessorInfo> callback) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   // Stub never generated for non-global objects that require access
   // checks.
@@ -2827,9 +2837,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     const CallOptimization& call_optimization) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   Register values[] = { value() };
   GenerateFastApiCall(
@@ -2925,15 +2933,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> last,
     Handle<Name> name,
     Handle<JSGlobalObject> global) {
-  Label success;
+  NonexistentHandlerFrontend(object, last, name, global);
 
-  NonexistentHandlerFrontend(object, last, name, &success, global);
-
-  __ bind(&success);
   // Return undefined if the maps of the full prototype chain are still the same.
   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   __ Ret();
@@ -3025,12 +3030,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<GlobalObject> global,
     Handle<PropertyCell> cell,
     Handle<Name> name,
     bool is_dont_delete) {
-  Label success, miss;
+  Label miss;
 
   HandlerFrontendHeader(object, receiver(), global, name, &miss);
 
@@ -3044,8 +3049,7 @@
     __ Branch(&miss, eq, t0, Operand(at));
   }
 
-  HandlerFrontendFooter(name, &success, &miss);
-  __ bind(&success);
+  HandlerFrontendFooter(name, &miss);
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
@@ -3069,16 +3073,24 @@
     GenerateNameCheck(name, this->name(), &miss);
   }
 
-  __ JumpIfSmi(receiver(), &miss);
+  Label number_case;
+  Label* smi_target = HasHeapNumberMap(receiver_maps) ? &number_case : &miss;
+  __ JumpIfSmi(receiver(), smi_target);
+
   Register map_reg = scratch1();
 
   int receiver_count = receiver_maps->length();
   int number_of_handled_maps = 0;
   __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
+  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   for (int current = 0; current < receiver_count; ++current) {
     Handle<Map> map = receiver_maps->at(current);
     if (!map->is_deprecated()) {
       number_of_handled_maps++;
+      if (map.is_identical_to(heap_number_map)) {
+        ASSERT(!number_case.is_unused());
+        __ bind(&number_case);
+      }
       __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
           eq, map_reg, Operand(receiver_maps->at(current)));
     }
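
The polymorphic-IC hunk above lets a Smi receiver reach the handler compiled for the heap-number map instead of missing outright. A small standalone sketch of that dispatch idea (illustrative types only, not the V8 IC machinery):

#include <cstdio>
#include <vector>

using Map = int;
const Map kHeapNumberMap = 1;

// Returns the index of the handler to jump to, or -1 for a miss.
int Dispatch(bool receiver_is_smi, Map receiver_map, const std::vector<Map>& maps) {
  if (receiver_is_smi) receiver_map = kHeapNumberMap;  // the "number_case" label
  for (size_t i = 0; i < maps.size(); ++i) {
    if (maps[i] == receiver_map) return static_cast<int>(i);
  }
  return -1;  // miss
}

int main() {
  std::vector<Map> maps = { kHeapNumberMap, 7 };
  std::printf("smi receiver -> handler %d\n", Dispatch(true, 0, maps));
  std::printf("map 7        -> handler %d\n", Dispatch(false, 7, maps));
  std::printf("map 9        -> handler %d\n", Dispatch(false, 9, maps));
}
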
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index f59687a..ed93e1d 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -367,9 +367,6 @@
     SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
     SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
   }
-  ASSERT(!is_observed() || instance_type() < FIRST_JS_OBJECT_TYPE ||
-         instance_type() > LAST_JS_OBJECT_TYPE ||
-         has_slow_elements_kind() || has_external_array_elements());
 }
 
 
diff --git a/src/objects-inl.h b/src/objects-inl.h
index a89c049..9358f42 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -1330,6 +1330,7 @@
 
 
 void AllocationSite::Initialize() {
+  set_transition_info(Smi::FromInt(0));
   SetElementsKind(GetInitialFastElementsKind());
   set_nested_site(Smi::FromInt(0));
   set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
@@ -1367,6 +1368,21 @@
 }
 
 
+inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
+    Reason reason) {
+  switch (reason) {
+    case TENURING:
+      return DependentCode::kAllocationSiteTenuringChangedGroup;
+      break;
+    case TRANSITIONS:
+      return DependentCode::kAllocationSiteTransitionChangedGroup;
+      break;
+  }
+  UNREACHABLE();
+  return DependentCode::kAllocationSiteTransitionChangedGroup;
+}
+
+
 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
   object->ValidateElements();
   ElementsKind elements_kind = object->map()->elements_kind();
@@ -3651,16 +3667,13 @@
 }
 
 
-void Map::set_is_observed(bool is_observed) {
-  ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
-         instance_type() > LAST_JS_OBJECT_TYPE ||
-         has_slow_elements_kind() || has_external_array_elements());
-  set_bit_field3(IsObserved::update(bit_field3(), is_observed));
+void Map::set_has_instance_call_handler() {
+  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
 }
 
 
-bool Map::is_observed() {
-  return IsObserved::decode(bit_field3());
+bool Map::has_instance_call_handler() {
+  return HasInstanceCallHandler::decode(bit_field3());
 }
 
 
@@ -4169,9 +4182,9 @@
 
 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                           ExtraICState extra_ic_state,
+                                          InlineCacheHolderFlag holder,
                                           StubType type,
-                                          int argc,
-                                          InlineCacheHolderFlag holder) {
+                                          int argc) {
   return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
 }
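
The new AllocationSite::ToDependencyGroup above maps a dependency Reason onto a DependentCode group. A self-contained sketch of that mapping (enum values illustrative, not the real constants); the real code places UNREACHABLE() before a fallback return so that compilers which cannot prove the switch exhaustive stay satisfied:

enum Reason { TENURING, TRANSITIONS };
enum DependencyGroup {
  kAllocationSiteTenuringChangedGroup,
  kAllocationSiteTransitionChangedGroup
};

DependencyGroup ToDependencyGroup(Reason reason) {
  switch (reason) {
    case TENURING:    return kAllocationSiteTenuringChangedGroup;
    case TRANSITIONS: return kAllocationSiteTransitionChangedGroup;
  }
  // Not reachable for valid Reason values; keeps the compiler satisfied.
  return kAllocationSiteTransitionChangedGroup;
}
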
 
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 6d2811b..5260193 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -1126,17 +1126,12 @@
   PrintF(out, "\n - nested site: ");
   nested_site()->ShortPrint(out);
   PrintF(out, "\n - transition_info: ");
-  if (transition_info()->IsCell()) {
-    Cell* cell = Cell::cast(transition_info());
-    Object* cell_contents = cell->value();
-    if (cell_contents->IsSmi()) {
-      ElementsKind kind = static_cast<ElementsKind>(
-          Smi::cast(cell_contents)->value());
-      PrintF(out, "Array allocation with ElementsKind ");
-      PrintElementsKind(out, kind);
-      PrintF(out, "\n");
-      return;
-    }
+  if (transition_info()->IsSmi()) {
+    ElementsKind kind = GetElementsKind();
+    PrintF(out, "Array allocation with ElementsKind ");
+    PrintElementsKind(out, kind);
+    PrintF(out, "\n");
+    return;
   } else if (transition_info()->IsJSArray()) {
     PrintF(out, "Array literal ");
     transition_info()->ShortPrint(out);
diff --git a/src/objects.cc b/src/objects.cc
index 935e875..671d06f 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1028,6 +1028,12 @@
 }
 
 
+Map* Object::GetMarkerMap(Isolate* isolate) {
+  if (IsSmi()) return isolate->heap()->heap_number_map();
+  return HeapObject::cast(this)->map();
+}
+
+
 Object* Object::GetHash() {
   // The object is either a number, a name, an odd-ball,
   // a real JS object, or a Harmony proxy.
@@ -2966,52 +2972,44 @@
 }
 
 
-MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
+Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
+    Handle<JSObject> object,
     uint32_t index,
-    Object* value,
+    Handle<Object> value,
     bool* found,
     StrictModeFlag strict_mode) {
-  Heap* heap = GetHeap();
-  for (Object* pt = GetPrototype();
-       pt != heap->null_value();
-       pt = pt->GetPrototype(GetIsolate())) {
-    if (pt->IsJSProxy()) {
-      Isolate* isolate = GetIsolate();
-      HandleScope scope(isolate);
-      Handle<JSProxy> proxy(JSProxy::cast(pt));
-      Handle<JSObject> self(this, isolate);
-      Handle<String> name = isolate->factory()->Uint32ToString(index);
-      Handle<Object> value_handle(value, isolate);
-      Handle<Object> result = JSProxy::SetPropertyViaPrototypesWithHandler(
-          proxy, self, name, value_handle, NONE, strict_mode, found);
-      RETURN_IF_EMPTY_HANDLE(isolate, result);
-      return *result;
+  Isolate *isolate = object->GetIsolate();
+  for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
+       !proto->IsNull();
+       proto = handle(proto->GetPrototype(isolate), isolate)) {
+    if (proto->IsJSProxy()) {
+      return JSProxy::SetPropertyViaPrototypesWithHandler(
+          Handle<JSProxy>::cast(proto),
+          object,
+          isolate->factory()->Uint32ToString(index),  // name
+          value,
+          NONE,
+          strict_mode,
+          found);
     }
-    if (!JSObject::cast(pt)->HasDictionaryElements()) {
+    Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
+    if (!js_proto->HasDictionaryElements()) {
       continue;
     }
-    SeededNumberDictionary* dictionary =
-        JSObject::cast(pt)->element_dictionary();
+    Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
     int entry = dictionary->FindEntry(index);
     if (entry != SeededNumberDictionary::kNotFound) {
       PropertyDetails details = dictionary->DetailsAt(entry);
       if (details.type() == CALLBACKS) {
         *found = true;
-        Isolate* isolate = GetIsolate();
-        HandleScope scope(isolate);
-        Handle<JSObject> self(this, isolate);
         Handle<Object> structure(dictionary->ValueAt(entry), isolate);
-        Handle<Object> value_handle(value, isolate);
-        Handle<JSObject> holder(JSObject::cast(pt));
-        Handle<Object> result = SetElementWithCallback(
-            self, structure, index, value_handle, holder, strict_mode);
-        RETURN_IF_EMPTY_HANDLE(isolate, result);
-        return *result;
+        return SetElementWithCallback(object, structure, index, value, js_proto,
+                                      strict_mode);
       }
     }
   }
   *found = false;
-  return heap->the_hole_value();
+  return isolate->factory()->the_hole_value();
 }
 
 
@@ -5614,12 +5612,6 @@
   if (object->map()->is_observed())
     return;
 
-  if (!object->HasExternalArrayElements()) {
-    // Go to dictionary mode, so that we don't skip map checks.
-    NormalizeElements(object);
-    ASSERT(!object->HasFastElements());
-  }
-
   LookupResult result(isolate);
   object->map()->LookupTransition(*object,
                                   isolate->heap()->observed_symbol(),
@@ -5633,7 +5625,7 @@
     new_map = Map::CopyForObserved(handle(object->map()));
   } else {
     new_map = Map::Copy(handle(object->map()));
-    new_map->set_is_observed(true);
+    new_map->set_is_observed();
   }
   object->set_map(*new_map);
 }
@@ -6256,7 +6248,6 @@
   Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
   ASSERT(object->HasDictionaryElements() ||
          object->HasDictionaryArgumentsElements());
-
   // Update the dictionary with the new CALLBACKS property.
   dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
                                            details);
@@ -6971,7 +6962,7 @@
 
   map->set_transitions(*transitions);
 
-  new_map->set_is_observed(true);
+  new_map->set_is_observed();
 
   if (map->owns_descriptors()) {
     new_map->InitializeDescriptors(map->instance_descriptors());
@@ -11210,6 +11201,18 @@
 #endif  // ENABLE_DISASSEMBLER
 
 
+Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
+    Handle<JSObject> object,
+    int capacity,
+    int length,
+    SetFastElementsCapacitySmiMode smi_mode) {
+  CALL_HEAP_FUNCTION(
+      object->GetIsolate(),
+      object->SetFastElementsCapacityAndLength(capacity, length, smi_mode),
+      FixedArray);
+}
+
+
 MaybeObject* JSObject::SetFastElementsCapacityAndLength(
     int capacity,
     int length,
@@ -11217,7 +11220,6 @@
   Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(!HasExternalArrayElements());
-  ASSERT(!map()->is_observed());
 
   // Allocate a new fast elements backing store.
   FixedArray* new_elements;
@@ -11296,13 +11298,22 @@
   return false;
 }
 
+
+void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
+                                                      int capacity,
+                                                      int length) {
+  CALL_HEAP_FUNCTION_VOID(
+      object->GetIsolate(),
+      object->SetFastDoubleElementsCapacityAndLength(capacity, length));
+}
+
+
 MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
     int capacity,
     int length) {
   Heap* heap = GetHeap();
   // We should never end in here with a pixel or external array.
   ASSERT(!HasExternalArrayElements());
-  ASSERT(!map()->is_observed());
 
   FixedArrayBase* elems;
   { MaybeObject* maybe_obj =
@@ -11451,10 +11462,6 @@
   if (!new_length_handle->ToArrayIndex(&new_length))
     return Failure::InternalError();
 
-  // Observed arrays should always be in dictionary mode;
-  // if they were in fast mode, the below is slower than necessary
-  // as it iterates over the array backing store multiple times.
-  ASSERT(self->HasDictionaryElements());
   static const PropertyAttributes kNoAttrFilter = NONE;
   int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
   if (num_elements > 0) {
@@ -11465,6 +11472,8 @@
       }
     } else {
       // For sparse arrays, only iterate over existing elements.
+      // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
+      // the to-be-removed indices twice.
       Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
       self->GetLocalElementKeys(*keys, kNoAttrFilter);
       while (num_elements-- > 0) {
@@ -11633,6 +11642,9 @@
   AllowDeferredHandleDereference dependencies_are_safe;
   if (group == DependentCode::kPropertyCellChangedGroup) {
     return Handle<PropertyCell>::cast(object)->dependent_code();
+  } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
+      group == DependentCode::kAllocationSiteTransitionChangedGroup) {
+    return Handle<AllocationSite>::cast(object)->dependent_code();
   }
   return Handle<Map>::cast(object)->dependent_code();
 }
@@ -11645,11 +11657,9 @@
   int start = starts.at(group);
   int end = starts.at(group + 1);
   int number_of_entries = starts.number_of_entries();
-  if (start < end && entries->object_at(end - 1) == *object) {
-    // Do not append the compilation info if it is already in the array.
-    // It is sufficient to just check only the last element because
-    // we process embedded maps of an optimized code in one batch.
-    return entries;
+  // Check for existing entry to avoid duplicates.
+  for (int i = start; i < end; i++) {
+    if (entries->object_at(i) == *object) return entries;
   }
   if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
     Factory* factory = entries->GetIsolate()->factory();
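
This is the duplicate-check fix called out in the changelog (Chromium issue 318454): a duplicate entry can sit anywhere within a group's slice, so the whole range [start, end) is scanned instead of only the last element. A standalone model (hypothetical layout, not the real DependentCode format):

#include <vector>

struct Entry { const void* object; };

// True if |object| already has an entry in the group slice [start, end).
bool AlreadyInGroup(const std::vector<Entry>& entries,
                    int start, int end, const void* object) {
  for (int i = start; i < end; i++) {
    if (entries[i].object == object) return true;  // skip the duplicate insert
  }
  return false;
}
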
@@ -11932,42 +11942,38 @@
 }
 
 
-MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
-                                                 Object* value,
-                                                 PropertyAttributes attributes,
-                                                 StrictModeFlag strict_mode,
-                                                 bool check_prototype,
-                                                 SetPropertyMode set_mode) {
-  Isolate* isolate = GetIsolate();
-  HandleScope scope(isolate);
+Handle<Object> JSObject::SetElementWithInterceptor(
+    Handle<JSObject> object,
+    uint32_t index,
+    Handle<Object> value,
+    PropertyAttributes attributes,
+    StrictModeFlag strict_mode,
+    bool check_prototype,
+    SetPropertyMode set_mode) {
+  Isolate* isolate = object->GetIsolate();
 
   // Make sure that the top context does not change when doing
   // callbacks or interceptor calls.
   AssertNoContextChange ncc(isolate);
 
-  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
-  Handle<JSObject> this_handle(this);
-  Handle<Object> value_handle(value, isolate);
+  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
   if (!interceptor->setter()->IsUndefined()) {
     v8::IndexedPropertySetterCallback setter =
         v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
     LOG(isolate,
-        ApiIndexedPropertyAccess("interceptor-indexed-set", this, index));
-    PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
+        ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
+    PropertyCallbackArguments args(isolate, interceptor->data(), *object,
+                                   *object);
     v8::Handle<v8::Value> result =
-        args.Call(setter, index, v8::Utils::ToLocal(value_handle));
-    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
-    if (!result.IsEmpty()) return *value_handle;
+        args.Call(setter, index, v8::Utils::ToLocal(value));
+    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
+    if (!result.IsEmpty()) return value;
   }
-  MaybeObject* raw_result =
-      this_handle->SetElementWithoutInterceptor(index,
-                                                *value_handle,
-                                                attributes,
-                                                strict_mode,
-                                                check_prototype,
-                                                set_mode);
-  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
-  return raw_result;
+
+  return SetElementWithoutInterceptor(object, index, value, attributes,
+                                      strict_mode,
+                                      check_prototype,
+                                      set_mode);
 }
 
 
@@ -12115,41 +12121,39 @@
 // Adding n elements in fast case is O(n*n).
 // Note: revisit design to have dual undefined values to capture absent
 // elements.
-MaybeObject* JSObject::SetFastElement(uint32_t index,
-                                      Object* value,
-                                      StrictModeFlag strict_mode,
-                                      bool check_prototype) {
-  ASSERT(HasFastSmiOrObjectElements() ||
-         HasFastArgumentsElements());
+Handle<Object> JSObject::SetFastElement(Handle<JSObject> object,
+                                        uint32_t index,
+                                        Handle<Object> value,
+                                        StrictModeFlag strict_mode,
+                                        bool check_prototype) {
+  ASSERT(object->HasFastSmiOrObjectElements() ||
+         object->HasFastArgumentsElements());
+
+  Isolate* isolate = object->GetIsolate();
 
   // Array optimizations rely on the prototype lookups of Array objects always
   // returning undefined. If there is a store to the initial prototype object,
   // make sure all of these optimizations are invalidated.
-  Isolate* isolate(GetIsolate());
-  if (isolate->is_initial_object_prototype(this) ||
-      isolate->is_initial_array_prototype(this)) {
-    HandleScope scope(GetIsolate());
-    map()->dependent_code()->DeoptimizeDependentCodeGroup(
-        GetIsolate(),
+  if (isolate->is_initial_object_prototype(*object) ||
+      isolate->is_initial_array_prototype(*object)) {
+    object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
         DependentCode::kElementsCantBeAddedGroup);
   }
 
-  FixedArray* backing_store = FixedArray::cast(elements());
-  if (backing_store->map() == GetHeap()->non_strict_arguments_elements_map()) {
-    backing_store = FixedArray::cast(backing_store->get(1));
+  Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
+  if (backing_store->map() ==
+      isolate->heap()->non_strict_arguments_elements_map()) {
+    backing_store = handle(FixedArray::cast(backing_store->get(1)));
   } else {
-    MaybeObject* maybe = EnsureWritableFastElements();
-    if (!maybe->To(&backing_store)) return maybe;
+    backing_store = EnsureWritableFastElements(object);
   }
   uint32_t capacity = static_cast<uint32_t>(backing_store->length());
 
   if (check_prototype &&
       (index >= capacity || backing_store->get(index)->IsTheHole())) {
     bool found;
-    MaybeObject* result = SetElementWithCallbackSetterInPrototypes(index,
-                                                                   value,
-                                                                   &found,
-                                                                   strict_mode);
+    Handle<Object> result = SetElementWithCallbackSetterInPrototypes(
+        object, index, value, &found, strict_mode);
     if (found) return result;
   }
 
@@ -12158,8 +12162,8 @@
   uint32_t array_length = 0;
   bool must_update_array_length = false;
   bool introduces_holes = true;
-  if (IsJSArray()) {
-    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
+  if (object->IsJSArray()) {
+    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
     introduces_holes = index > array_length;
     if (index >= array_length) {
       must_update_array_length = true;
@@ -12171,13 +12175,12 @@
 
   // If the array is growing, and it's not growing by a single element at the
   // end, make sure that the ElementsKind is HOLEY.
-  ElementsKind elements_kind = GetElementsKind();
+  ElementsKind elements_kind = object->GetElementsKind();
   if (introduces_holes &&
       IsFastElementsKind(elements_kind) &&
       !IsFastHoleyElementsKind(elements_kind)) {
     ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
-    MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
-    if (maybe->IsFailure()) return maybe;
+    TransitionElementsKind(object, transitioned_kind);
   }
 
   // Check if the capacity of the backing store needs to be increased, or if
@@ -12187,94 +12190,80 @@
     if ((index - capacity) < kMaxGap) {
       new_capacity = NewElementsCapacity(index + 1);
       ASSERT(new_capacity > index);
-      if (!ShouldConvertToSlowElements(new_capacity)) {
+      if (!object->ShouldConvertToSlowElements(new_capacity)) {
         convert_to_slow = false;
       }
     }
     if (convert_to_slow) {
-      MaybeObject* result = NormalizeElements();
-      if (result->IsFailure()) return result;
-      return SetDictionaryElement(index, value, NONE, strict_mode,
+      NormalizeElements(object);
+      return SetDictionaryElement(object, index, value, NONE, strict_mode,
                                   check_prototype);
     }
   }
   // Convert to fast double elements if appropriate.
-  if (HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
+  if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
     // Consider fixing the boilerplate as well if we have one.
     ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
         ? FAST_HOLEY_DOUBLE_ELEMENTS
         : FAST_DOUBLE_ELEMENTS;
 
-    MaybeObject* maybe_failure = UpdateAllocationSite(to_kind);
-    if (maybe_failure->IsFailure()) return maybe_failure;
+    UpdateAllocationSite(object, to_kind);
 
-    MaybeObject* maybe =
-        SetFastDoubleElementsCapacityAndLength(new_capacity, array_length);
-    if (maybe->IsFailure()) return maybe;
-    FixedDoubleArray::cast(elements())->set(index, value->Number());
-    ValidateElements();
+    SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
+    FixedDoubleArray::cast(object->elements())->set(index, value->Number());
+    object->ValidateElements();
     return value;
   }
   // Change elements kind from Smi-only to generic FAST if necessary.
-  if (HasFastSmiElements() && !value->IsSmi()) {
-    Map* new_map;
-    ElementsKind kind = HasFastHoleyElements()
+  if (object->HasFastSmiElements() && !value->IsSmi()) {
+    ElementsKind kind = object->HasFastHoleyElements()
         ? FAST_HOLEY_ELEMENTS
         : FAST_ELEMENTS;
 
-    MaybeObject* maybe_failure = UpdateAllocationSite(kind);
-    if (maybe_failure->IsFailure()) return maybe_failure;
-
-    MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
-                                                          kind);
-    if (!maybe_new_map->To(&new_map)) return maybe_new_map;
-
-    set_map(new_map);
+    UpdateAllocationSite(object, kind);
+    Handle<Map> new_map = GetElementsTransitionMap(object, kind);
+    object->set_map(*new_map);
+    ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
   }
   // Increase backing store capacity if that's been decided previously.
   if (new_capacity != capacity) {
-    FixedArray* new_elements;
     SetFastElementsCapacitySmiMode smi_mode =
-        value->IsSmi() && HasFastSmiElements()
+        value->IsSmi() && object->HasFastSmiElements()
             ? kAllowSmiElements
             : kDontAllowSmiElements;
-    { MaybeObject* maybe =
-          SetFastElementsCapacityAndLength(new_capacity,
-                                           array_length,
-                                           smi_mode);
-      if (!maybe->To(&new_elements)) return maybe;
-    }
-    new_elements->set(index, value);
-    ValidateElements();
+    Handle<FixedArray> new_elements =
+        SetFastElementsCapacityAndLength(object, new_capacity, array_length,
+                                         smi_mode);
+    new_elements->set(index, *value);
+    object->ValidateElements();
     return value;
   }
 
   // Finally, set the new element and length.
-  ASSERT(elements()->IsFixedArray());
-  backing_store->set(index, value);
+  ASSERT(object->elements()->IsFixedArray());
+  backing_store->set(index, *value);
   if (must_update_array_length) {
-    JSArray::cast(this)->set_length(Smi::FromInt(array_length));
+    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
   }
   return value;
 }
 
 
-MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
-                                            Object* value_raw,
-                                            PropertyAttributes attributes,
-                                            StrictModeFlag strict_mode,
-                                            bool check_prototype,
-                                            SetPropertyMode set_mode) {
-  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
-  Isolate* isolate = GetIsolate();
-  Heap* heap = isolate->heap();
-  Handle<JSObject> self(this);
-  Handle<Object> value(value_raw, isolate);
+Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object,
+                                              uint32_t index,
+                                              Handle<Object> value,
+                                              PropertyAttributes attributes,
+                                              StrictModeFlag strict_mode,
+                                              bool check_prototype,
+                                              SetPropertyMode set_mode) {
+  ASSERT(object->HasDictionaryElements() ||
+         object->HasDictionaryArgumentsElements());
+  Isolate* isolate = object->GetIsolate();
 
   // Insert element in the dictionary.
-  Handle<FixedArray> elements(FixedArray::cast(this->elements()));
+  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
   bool is_arguments =
-      (elements->map() == heap->non_strict_arguments_elements_map());
+      (elements->map() == isolate->heap()->non_strict_arguments_elements_map());
   Handle<SeededNumberDictionary> dictionary(is_arguments
     ? SeededNumberDictionary::cast(elements->get(1))
     : SeededNumberDictionary::cast(*elements));
@@ -12284,10 +12273,8 @@
     Handle<Object> element(dictionary->ValueAt(entry), isolate);
     PropertyDetails details = dictionary->DetailsAt(entry);
     if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
-      Handle<Object> result = SetElementWithCallback(self, element, index,
-                                                     value, self, strict_mode);
-      RETURN_IF_EMPTY_HANDLE(isolate, result);
-      return *result;
+      return SetElementWithCallback(object, element, index, value, object,
+                                    strict_mode);
     } else {
       dictionary->UpdateMaxNumberKey(index);
       // If a value has not been initialized we allow writing to it even if it
@@ -12299,21 +12286,22 @@
         dictionary->DetailsAtPut(entry, details);
       } else if (details.IsReadOnly() && !element->IsTheHole()) {
         if (strict_mode == kNonStrictMode) {
-          return isolate->heap()->undefined_value();
+          return isolate->factory()->undefined_value();
         } else {
-          Handle<Object> holder(this, isolate);
           Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
-          Handle<Object> args[2] = { number, holder };
+          Handle<Object> args[2] = { number, object };
           Handle<Object> error =
               isolate->factory()->NewTypeError("strict_read_only_property",
                                                HandleVector(args, 2));
-          return isolate->Throw(*error);
+          isolate->Throw(*error);
+          return Handle<Object>();
         }
       }
       // Elements of the arguments object in slow mode might be slow aliases.
       if (is_arguments && element->IsAliasedArgumentsEntry()) {
-        AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(*element);
-        Context* context = Context::cast(elements->get(0));
+        Handle<AliasedArgumentsEntry> entry =
+            Handle<AliasedArgumentsEntry>::cast(element);
+        Handle<Context> context(Context::cast(elements->get(0)));
         int context_index = entry->aliased_context_slot();
         ASSERT(!context->get(context_index)->IsTheHole());
         context->set(context_index, *value);
@@ -12327,15 +12315,16 @@
     // Can cause GC!
     if (check_prototype) {
       bool found;
-      MaybeObject* result = SetElementWithCallbackSetterInPrototypes(
-          index, *value, &found, strict_mode);
+      Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
+          index, value, &found, strict_mode);
       if (found) return result;
     }
+
     // When we set the is_extensible flag to false we always force the
     // elements into dictionary mode (and force them to stay there).
-    if (!self->map()->is_extensible()) {
+    if (!object->map()->is_extensible()) {
       if (strict_mode == kNonStrictMode) {
-        return isolate->heap()->undefined_value();
+        return isolate->factory()->undefined_value();
       } else {
         Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
         Handle<String> name = isolate->factory()->NumberToString(number);
@@ -12343,36 +12332,36 @@
         Handle<Object> error =
             isolate->factory()->NewTypeError("object_not_extensible",
                                              HandleVector(args, 1));
-        return isolate->Throw(*error);
+        isolate->Throw(*error);
+        return Handle<Object>();
       }
     }
-    FixedArrayBase* new_dictionary;
+
     PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
-    MaybeObject* maybe = dictionary->AddNumberEntry(index, *value, details);
-    if (!maybe->To(&new_dictionary)) return maybe;
-    if (*dictionary != SeededNumberDictionary::cast(new_dictionary)) {
+    Handle<SeededNumberDictionary> new_dictionary =
+        SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
+                                               details);
+    if (*dictionary != *new_dictionary) {
       if (is_arguments) {
-        elements->set(1, new_dictionary);
+        elements->set(1, *new_dictionary);
       } else {
-        self->set_elements(new_dictionary);
+        object->set_elements(*new_dictionary);
       }
-      dictionary =
-          handle(SeededNumberDictionary::cast(new_dictionary), isolate);
+      dictionary = new_dictionary;
     }
   }
 
   // Update the array length if this JSObject is an array.
-  if (self->IsJSArray()) {
-    MaybeObject* result =
-        JSArray::cast(*self)->JSArrayUpdateLengthFromIndex(index, *value);
-    if (result->IsFailure()) return result;
+  if (object->IsJSArray()) {
+    JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
+                                          value);
   }
 
   // Attempt to put this object back in fast case.
-  if (self->ShouldConvertToFastElements()) {
+  if (object->ShouldConvertToFastElements()) {
     uint32_t new_length = 0;
-    if (self->IsJSArray()) {
-      CHECK(JSArray::cast(*self)->length()->ToArrayIndex(&new_length));
+    if (object->IsJSArray()) {
+      CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
     } else {
       new_length = dictionary->max_number_key() + 1;
     }
@@ -12381,47 +12370,47 @@
         : kDontAllowSmiElements;
     bool has_smi_only_elements = false;
     bool should_convert_to_fast_double_elements =
-        self->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
+        object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
     if (has_smi_only_elements) {
       smi_mode = kForceSmiElements;
     }
-    MaybeObject* result = should_convert_to_fast_double_elements
-        ? self->SetFastDoubleElementsCapacityAndLength(new_length, new_length)
-        : self->SetFastElementsCapacityAndLength(
-            new_length, new_length, smi_mode);
-    self->ValidateElements();
-    if (result->IsFailure()) return result;
+
+    if (should_convert_to_fast_double_elements) {
+      SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
+    } else {
+      SetFastElementsCapacityAndLength(object, new_length, new_length,
+                                       smi_mode);
+    }
+    object->ValidateElements();
 #ifdef DEBUG
     if (FLAG_trace_normalization) {
       PrintF("Object elements are fast case again:\n");
-      Print();
+      object->Print();
     }
 #endif
   }
-  return *value;
+  return value;
 }
 
-
-MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
+Handle<Object> JSObject::SetFastDoubleElement(
+    Handle<JSObject> object,
     uint32_t index,
-    Object* value,
+    Handle<Object> value,
     StrictModeFlag strict_mode,
     bool check_prototype) {
-  ASSERT(HasFastDoubleElements());
+  ASSERT(object->HasFastDoubleElements());
 
-  FixedArrayBase* base_elms = FixedArrayBase::cast(elements());
+  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
   uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
 
   // If storing to an element that isn't in the array, pass the store request
   // up the prototype chain before storing in the receiver's elements.
   if (check_prototype &&
       (index >= elms_length ||
-       FixedDoubleArray::cast(base_elms)->is_the_hole(index))) {
+       Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
     bool found;
-    MaybeObject* result = SetElementWithCallbackSetterInPrototypes(index,
-                                                                   value,
-                                                                   &found,
-                                                                   strict_mode);
+    Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object,
+        index, value, &found, strict_mode);
     if (found) return result;
   }
 
@@ -12430,48 +12419,47 @@
   bool value_is_smi = value->IsSmi();
   bool introduces_holes = true;
   uint32_t length = elms_length;
-  if (IsJSArray()) {
-    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
+  if (object->IsJSArray()) {
+    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
     introduces_holes = index > length;
   } else {
     introduces_holes = index >= elms_length;
   }
 
   if (!value->IsNumber()) {
-    MaybeObject* maybe_obj = SetFastElementsCapacityAndLength(
-        elms_length,
-        length,
-        kDontAllowSmiElements);
-    if (maybe_obj->IsFailure()) return maybe_obj;
-    maybe_obj = SetFastElement(index, value, strict_mode, check_prototype);
-    if (maybe_obj->IsFailure()) return maybe_obj;
-    ValidateElements();
-    return maybe_obj;
+    SetFastElementsCapacityAndLength(object, elms_length, length,
+                                     kDontAllowSmiElements);
+    Handle<Object> result = SetFastElement(object, index, value, strict_mode,
+                                           check_prototype);
+    RETURN_IF_EMPTY_HANDLE_VALUE(object->GetIsolate(), result,
+                                 Handle<Object>());
+    object->ValidateElements();
+    return result;
   }
 
   double double_value = value_is_smi
-      ? static_cast<double>(Smi::cast(value)->value())
-      : HeapNumber::cast(value)->value();
+      ? static_cast<double>(Handle<Smi>::cast(value)->value())
+      : Handle<HeapNumber>::cast(value)->value();
 
   // If the array is growing, and it's not growing by a single element at the
   // end, make sure that the ElementsKind is HOLEY.
-  ElementsKind elements_kind = GetElementsKind();
+  ElementsKind elements_kind = object->GetElementsKind();
   if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
     ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
-    MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
-    if (maybe->IsFailure()) return maybe;
+    TransitionElementsKind(object, transitioned_kind);
   }
 
   // Check whether there is extra space in the fixed array.
   if (index < elms_length) {
-    FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
+    Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
     elms->set(index, double_value);
-    if (IsJSArray()) {
+    if (object->IsJSArray()) {
       // Update the length of the array if needed.
       uint32_t array_length = 0;
-      CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
+      CHECK(
+          Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
       if (index >= array_length) {
-        JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
+        Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
       }
     }
     return value;
@@ -12481,27 +12469,23 @@
   if ((index - elms_length) < kMaxGap) {
     // Try allocating extra space.
     int new_capacity = NewElementsCapacity(index+1);
-    if (!ShouldConvertToSlowElements(new_capacity)) {
+    if (!object->ShouldConvertToSlowElements(new_capacity)) {
       ASSERT(static_cast<uint32_t>(new_capacity) > index);
-      MaybeObject* maybe_obj =
-          SetFastDoubleElementsCapacityAndLength(new_capacity, index + 1);
-      if (maybe_obj->IsFailure()) return maybe_obj;
-      FixedDoubleArray::cast(elements())->set(index, double_value);
-      ValidateElements();
+      SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
+      FixedDoubleArray::cast(object->elements())->set(index, double_value);
+      object->ValidateElements();
       return value;
     }
   }
 
   // Otherwise default to slow case.
-  ASSERT(HasFastDoubleElements());
-  ASSERT(map()->has_fast_double_elements());
-  ASSERT(elements()->IsFixedDoubleArray());
-  Object* obj;
-  { MaybeObject* maybe_obj = NormalizeElements();
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
-  ASSERT(HasDictionaryElements());
-  return SetElement(index, value, NONE, strict_mode, check_prototype);
+  ASSERT(object->HasFastDoubleElements());
+  ASSERT(object->map()->has_fast_double_elements());
+  ASSERT(object->elements()->IsFixedDoubleArray());
+
+  NormalizeElements(object);
+  ASSERT(object->HasDictionaryElements());
+  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
 }
 
 
@@ -12524,262 +12508,258 @@
                                        Handle<Object> value,
                                        StrictModeFlag strict_mode) {
   ASSERT(!object->HasExternalArrayElements());
-  CALL_HEAP_FUNCTION(
-      object->GetIsolate(),
-      object->SetElement(index, *value, NONE, strict_mode, false),
-      Object);
+  return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
 }
 
 
 Handle<Object> JSObject::SetElement(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> value,
-                                    PropertyAttributes attr,
+                                    PropertyAttributes attributes,
                                     StrictModeFlag strict_mode,
                                     bool check_prototype,
                                     SetPropertyMode set_mode) {
+  Isolate* isolate = object->GetIsolate();
+
   if (object->HasExternalArrayElements()) {
     if (!value->IsNumber() && !value->IsUndefined()) {
       bool has_exception;
       Handle<Object> number =
-          Execution::ToNumber(object->GetIsolate(), value, &has_exception);
+          Execution::ToNumber(isolate, value, &has_exception);
       if (has_exception) return Handle<Object>();
       value = number;
     }
   }
-  CALL_HEAP_FUNCTION(
-      object->GetIsolate(),
-      object->SetElement(index, *value, attr, strict_mode, check_prototype,
-                         set_mode),
-      Object);
-}
-
-
-MaybeObject* JSObject::SetElement(uint32_t index,
-                                  Object* value_raw,
-                                  PropertyAttributes attributes,
-                                  StrictModeFlag strict_mode,
-                                  bool check_prototype,
-                                  SetPropertyMode set_mode) {
-  Isolate* isolate = GetIsolate();
 
   // Check access rights if needed.
-  if (IsAccessCheckNeeded()) {
-    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_SET)) {
-      isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
-      RETURN_IF_SCHEDULED_EXCEPTION(isolate);
-      return value_raw;
+  if (object->IsAccessCheckNeeded()) {
+    if (!isolate->MayIndexedAccess(*object, index, v8::ACCESS_SET)) {
+      isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET);
+      RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
+      return value;
     }
   }
 
-  if (IsJSGlobalProxy()) {
-    Object* proto = GetPrototype();
-    if (proto->IsNull()) return value_raw;
+  if (object->IsJSGlobalProxy()) {
+    Handle<Object> proto(object->GetPrototype(), isolate);
+    if (proto->IsNull()) return value;
     ASSERT(proto->IsJSGlobalObject());
-    return JSObject::cast(proto)->SetElement(index,
-                                             value_raw,
-                                             attributes,
-                                             strict_mode,
-                                             check_prototype,
-                                             set_mode);
+    return SetElement(Handle<JSObject>::cast(proto), index, value, attributes,
+                      strict_mode,
+                      check_prototype,
+                      set_mode);
   }
 
   // Don't allow element properties to be redefined for external arrays.
-  if (HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
+  if (object->HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
     Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
-    Handle<Object> args[] = { handle(this, isolate), number };
+    Handle<Object> args[] = { object, number };
     Handle<Object> error = isolate->factory()->NewTypeError(
         "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
-    return isolate->Throw(*error);
+    isolate->Throw(*error);
+    return Handle<Object>();
   }
 
   // Normalize the elements to enable attributes on the property.
   if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
-    SeededNumberDictionary* dictionary;
-    MaybeObject* maybe_object = NormalizeElements();
-    if (!maybe_object->To(&dictionary)) return maybe_object;
+    Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
     // Make sure that we never go back to fast case.
     dictionary->set_requires_slow_elements();
   }
 
-  if (!(FLAG_harmony_observation && map()->is_observed())) {
-    return HasIndexedInterceptor()
-      ? SetElementWithInterceptor(
-          index, value_raw, attributes, strict_mode, check_prototype, set_mode)
-      : SetElementWithoutInterceptor(
-          index, value_raw, attributes, strict_mode, check_prototype, set_mode);
+  if (!(FLAG_harmony_observation && object->map()->is_observed())) {
+    return object->HasIndexedInterceptor()
+      ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
+                                  check_prototype,
+                                   set_mode)
+      : SetElementWithoutInterceptor(object, index, value, attributes,
+                                     strict_mode,
+                                     check_prototype,
+                                     set_mode);
   }
 
-  // From here on, everything has to be handlified.
-  Handle<JSObject> self(this);
-  Handle<Object> value(value_raw, isolate);
-  PropertyAttributes old_attributes = self->GetLocalElementAttribute(index);
+  PropertyAttributes old_attributes = object->GetLocalElementAttribute(index);
   Handle<Object> old_value = isolate->factory()->the_hole_value();
   Handle<Object> old_length_handle;
   Handle<Object> new_length_handle;
 
   if (old_attributes != ABSENT) {
-    if (self->GetLocalElementAccessorPair(index) == NULL)
-      old_value = Object::GetElement(isolate, self, index);
-  } else if (self->IsJSArray()) {
+    if (object->GetLocalElementAccessorPair(index) == NULL)
+      old_value = Object::GetElement(isolate, object, index);
+  } else if (object->IsJSArray()) {
     // Store old array length in case adding an element grows the array.
-    old_length_handle = handle(Handle<JSArray>::cast(self)->length(), isolate);
+    old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
+                               isolate);
   }
 
   // Check for lookup interceptor
-  MaybeObject* result = self->HasIndexedInterceptor()
-    ? self->SetElementWithInterceptor(
-        index, *value, attributes, strict_mode, check_prototype, set_mode)
-    : self->SetElementWithoutInterceptor(
-        index, *value, attributes, strict_mode, check_prototype, set_mode);
-
-  Handle<Object> hresult;
-  if (!result->ToHandle(&hresult, isolate)) return result;
+  Handle<Object> result = object->HasIndexedInterceptor()
+    ? SetElementWithInterceptor(object, index, value, attributes, strict_mode,
+                                check_prototype,
+                                set_mode)
+    : SetElementWithoutInterceptor(object, index, value, attributes,
+                                   strict_mode,
+                                   check_prototype,
+                                   set_mode);
+  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
 
   Handle<String> name = isolate->factory()->Uint32ToString(index);
-  PropertyAttributes new_attributes = self->GetLocalElementAttribute(index);
+  PropertyAttributes new_attributes = object->GetLocalElementAttribute(index);
   if (old_attributes == ABSENT) {
-    if (self->IsJSArray() &&
-        !old_length_handle->SameValue(Handle<JSArray>::cast(self)->length())) {
-      new_length_handle = handle(Handle<JSArray>::cast(self)->length(),
+    if (object->IsJSArray() &&
+        !old_length_handle->SameValue(
+            Handle<JSArray>::cast(object)->length())) {
+      new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
                                  isolate);
       uint32_t old_length = 0;
       uint32_t new_length = 0;
       CHECK(old_length_handle->ToArrayIndex(&old_length));
       CHECK(new_length_handle->ToArrayIndex(&new_length));
 
-      BeginPerformSplice(Handle<JSArray>::cast(self));
-      EnqueueChangeRecord(self, "add", name, old_value);
-      EnqueueChangeRecord(self, "update", isolate->factory()->length_string(),
+      BeginPerformSplice(Handle<JSArray>::cast(object));
+      EnqueueChangeRecord(object, "add", name, old_value);
+      EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
                           old_length_handle);
-      EndPerformSplice(Handle<JSArray>::cast(self));
+      EndPerformSplice(Handle<JSArray>::cast(object));
       Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
-      EnqueueSpliceRecord(Handle<JSArray>::cast(self), old_length, deleted,
+      EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
                           new_length - old_length);
     } else {
-      EnqueueChangeRecord(self, "add", name, old_value);
+      EnqueueChangeRecord(object, "add", name, old_value);
     }
   } else if (old_value->IsTheHole()) {
-    EnqueueChangeRecord(self, "reconfigure", name, old_value);
+    EnqueueChangeRecord(object, "reconfigure", name, old_value);
   } else {
-    Handle<Object> new_value = Object::GetElement(isolate, self, index);
+    Handle<Object> new_value = Object::GetElement(isolate, object, index);
     bool value_changed = !old_value->SameValue(*new_value);
     if (old_attributes != new_attributes) {
       if (!value_changed) old_value = isolate->factory()->the_hole_value();
-      EnqueueChangeRecord(self, "reconfigure", name, old_value);
+      EnqueueChangeRecord(object, "reconfigure", name, old_value);
     } else if (value_changed) {
-      EnqueueChangeRecord(self, "update", name, old_value);
+      EnqueueChangeRecord(object, "update", name, old_value);
     }
   }
 
-  return *hresult;
+  return result;
 }
 
 
-MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
-                                                    Object* value,
-                                                    PropertyAttributes attr,
-                                                    StrictModeFlag strict_mode,
-                                                    bool check_prototype,
-                                                    SetPropertyMode set_mode) {
-  ASSERT(HasDictionaryElements() ||
-         HasDictionaryArgumentsElements() ||
-         (attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
-  Isolate* isolate = GetIsolate();
+Handle<Object> JSObject::SetElementWithoutInterceptor(
+    Handle<JSObject> object,
+    uint32_t index,
+    Handle<Object> value,
+    PropertyAttributes attributes,
+    StrictModeFlag strict_mode,
+    bool check_prototype,
+    SetPropertyMode set_mode) {
+  ASSERT(object->HasDictionaryElements() ||
+         object->HasDictionaryArgumentsElements() ||
+         (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
+  Isolate* isolate = object->GetIsolate();
   if (FLAG_trace_external_array_abuse &&
-      IsExternalArrayElementsKind(GetElementsKind())) {
-    CheckArrayAbuse(this, "external elements write", index);
+      IsExternalArrayElementsKind(object->GetElementsKind())) {
+    CheckArrayAbuse(*object, "external elements write", index);
   }
   if (FLAG_trace_js_array_abuse &&
-      !IsExternalArrayElementsKind(GetElementsKind())) {
-    if (IsJSArray()) {
-      CheckArrayAbuse(this, "elements write", index, true);
+      !IsExternalArrayElementsKind(object->GetElementsKind())) {
+    if (object->IsJSArray()) {
+      CheckArrayAbuse(*object, "elements write", index, true);
     }
   }
-  switch (GetElementsKind()) {
+  switch (object->GetElementsKind()) {
     case FAST_SMI_ELEMENTS:
     case FAST_ELEMENTS:
     case FAST_HOLEY_SMI_ELEMENTS:
     case FAST_HOLEY_ELEMENTS:
-      return SetFastElement(index, value, strict_mode, check_prototype);
+      return SetFastElement(object, index, value, strict_mode, check_prototype);
     case FAST_DOUBLE_ELEMENTS:
     case FAST_HOLEY_DOUBLE_ELEMENTS:
-      return SetFastDoubleElement(index, value, strict_mode, check_prototype);
+      return SetFastDoubleElement(object, index, value, strict_mode,
+                                  check_prototype);
     case EXTERNAL_PIXEL_ELEMENTS: {
-      ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
-      return pixels->SetValue(index, value);
+      ExternalPixelArray* pixels = ExternalPixelArray::cast(object->elements());
+      return handle(pixels->SetValue(index, *value), isolate);
     }
     case EXTERNAL_BYTE_ELEMENTS: {
-      ExternalByteArray* array = ExternalByteArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalByteArray> array(
+          ExternalByteArray::cast(object->elements()));
+      return ExternalByteArray::SetValue(array, index, value);
     }
     case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: {
-      ExternalUnsignedByteArray* array =
-          ExternalUnsignedByteArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalUnsignedByteArray> array(
+          ExternalUnsignedByteArray::cast(object->elements()));
+      return ExternalUnsignedByteArray::SetValue(array, index, value);
     }
     case EXTERNAL_SHORT_ELEMENTS: {
-      ExternalShortArray* array = ExternalShortArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalShortArray> array(ExternalShortArray::cast(
+          object->elements()));
+      return ExternalShortArray::SetValue(array, index, value);
     }
     case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: {
-      ExternalUnsignedShortArray* array =
-          ExternalUnsignedShortArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalUnsignedShortArray> array(
+          ExternalUnsignedShortArray::cast(object->elements()));
+      return ExternalUnsignedShortArray::SetValue(array, index, value);
     }
     case EXTERNAL_INT_ELEMENTS: {
-      ExternalIntArray* array = ExternalIntArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalIntArray> array(
+          ExternalIntArray::cast(object->elements()));
+      return ExternalIntArray::SetValue(array, index, value);
     }
     case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
-      ExternalUnsignedIntArray* array =
-          ExternalUnsignedIntArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalUnsignedIntArray> array(
+          ExternalUnsignedIntArray::cast(object->elements()));
+      return ExternalUnsignedIntArray::SetValue(array, index, value);
     }
     case EXTERNAL_FLOAT_ELEMENTS: {
-      ExternalFloatArray* array = ExternalFloatArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalFloatArray> array(
+          ExternalFloatArray::cast(object->elements()));
+      return ExternalFloatArray::SetValue(array, index, value);
     }
     case EXTERNAL_DOUBLE_ELEMENTS: {
-      ExternalDoubleArray* array = ExternalDoubleArray::cast(elements());
-      return array->SetValue(index, value);
+      Handle<ExternalDoubleArray> array(
+          ExternalDoubleArray::cast(object->elements()));
+      return ExternalDoubleArray::SetValue(array, index, value);
     }
     case DICTIONARY_ELEMENTS:
-      return SetDictionaryElement(index, value, attr, strict_mode,
-                                  check_prototype, set_mode);
+      return SetDictionaryElement(object, index, value, attributes, strict_mode,
+                                  check_prototype,
+                                  set_mode);
     case NON_STRICT_ARGUMENTS_ELEMENTS: {
-      FixedArray* parameter_map = FixedArray::cast(elements());
+      Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
       uint32_t length = parameter_map->length();
-      Object* probe =
-          (index < length - 2) ? parameter_map->get(index + 2) : NULL;
-      if (probe != NULL && !probe->IsTheHole()) {
-        Context* context = Context::cast(parameter_map->get(0));
-        int context_index = Smi::cast(probe)->value();
+      Handle<Object> probe = index < length - 2 ?
+          Handle<Object>(parameter_map->get(index + 2), isolate) :
+          Handle<Object>();
+      if (!probe.is_null() && !probe->IsTheHole()) {
+        Handle<Context> context(Context::cast(parameter_map->get(0)));
+        int context_index = Handle<Smi>::cast(probe)->value();
         ASSERT(!context->get(context_index)->IsTheHole());
-        context->set(context_index, value);
+        context->set(context_index, *value);
         // Redefining attributes of an aliased element destroys fast aliasing.
-        if (set_mode == SET_PROPERTY || attr == NONE) return value;
+        if (set_mode == SET_PROPERTY || attributes == NONE) return value;
         parameter_map->set_the_hole(index + 2);
         // For elements that are still writable we re-establish slow aliasing.
-        if ((attr & READ_ONLY) == 0) {
-          MaybeObject* maybe_entry =
-              isolate->heap()->AllocateAliasedArgumentsEntry(context_index);
-          if (!maybe_entry->ToObject(&value)) return maybe_entry;
+        if ((attributes & READ_ONLY) == 0) {
+          value = Handle<Object>::cast(
+              isolate->factory()->NewAliasedArgumentsEntry(context_index));
         }
       }
-      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+      Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
       if (arguments->IsDictionary()) {
-        return SetDictionaryElement(index, value, attr, strict_mode,
-                                    check_prototype, set_mode);
+        return SetDictionaryElement(object, index, value, attributes,
+                                    strict_mode,
+                                    check_prototype,
+                                    set_mode);
       } else {
-        return SetFastElement(index, value, strict_mode, check_prototype);
+        return SetFastElement(object, index, value, strict_mode,
+                              check_prototype);
       }
     }
   }
   // All possible cases have been handled above. Add a return to avoid the
   // complaints from the compiler.
   UNREACHABLE();
-  return isolate->heap()->null_value();
+  return isolate->factory()->null_value();
 }
 
 
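The NON_STRICT_ARGUMENTS_ELEMENTS branch above depends on the layout of the parameter map: slot 0 holds the context, slot 1 the backing arguments store, and slot i+2 either the hole or the context slot index that argument i is aliased to. A minimal standalone sketch of that write path, using plain STL containers and std::optional as the hole marker (all names and types here are illustrative stand-ins, not V8's):

#include <cassert>
#include <cstdio>
#include <optional>
#include <vector>

// Illustrative stand-in for the parameter map layout: slot 0 = context,
// slot 1 = backing store, slot i+2 = context index for aliased argument i.
struct ParameterMap {
  std::vector<int>* context;                // slot 0
  std::vector<int>* arguments;              // slot 1
  std::vector<std::optional<int>> aliased;  // slots 2..n: context indices
};

// Writing argument `index`: while it is still aliased the value goes into
// the context slot; otherwise it goes into the backing arguments store.
void SetArgument(ParameterMap& map, unsigned index, int value) {
  if (index < map.aliased.size() && map.aliased[index].has_value()) {
    (*map.context)[*map.aliased[index]] = value;  // fast aliasing
  } else {
    if (map.arguments->size() <= index) map.arguments->resize(index + 1);
    (*map.arguments)[index] = value;              // unaliased element
  }
}

int main() {
  std::vector<int> context = {0, 0, 0};
  std::vector<int> arguments = {10, 20};
  // Argument 0 is aliased to context slot 2; argument 1 is not aliased.
  ParameterMap map{&context, &arguments, {2, std::nullopt}};

  SetArgument(map, 0, 42);  // lands in context[2]
  SetArgument(map, 1, 7);   // lands in arguments[1]
  assert(context[2] == 42 && arguments[1] == 7);
  std::printf("context[2]=%d arguments[1]=%d\n", context[2], arguments[1]);
}
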
@@ -12804,6 +12784,85 @@
 }
 
 
+MaybeObject* AllocationSite::DigestTransitionFeedback(ElementsKind to_kind) {
+  Isolate* isolate = GetIsolate();
+
+  if (SitePointsToLiteral() && transition_info()->IsJSArray()) {
+    JSArray* transition_info = JSArray::cast(this->transition_info());
+    ElementsKind kind = transition_info->GetElementsKind();
+    // If kind is holey, ensure that to_kind is as well.
+    if (IsHoleyElementsKind(kind)) {
+      to_kind = GetHoleyElementsKind(to_kind);
+    }
+    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
+      // If the array is huge, it's not likely to be defined in a local
+      // function, so we shouldn't make new instances of it very often.
+      uint32_t length = 0;
+      CHECK(transition_info->length()->ToArrayIndex(&length));
+      if (length <= kMaximumArrayBytesToPretransition) {
+        if (FLAG_trace_track_allocation_sites) {
+          bool is_nested = IsNestedSite();
+          PrintF(
+              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
+              reinterpret_cast<void*>(this),
+              is_nested ? "(nested)" : "",
+              ElementsKindToString(kind),
+              ElementsKindToString(to_kind));
+        }
+        MaybeObject* result = transition_info->TransitionElementsKind(to_kind);
+        if (result->IsFailure()) return result;
+        dependent_code()->DeoptimizeDependentCodeGroup(
+            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
+      }
+    }
+  } else {
+    ElementsKind kind = GetElementsKind();
+    // If kind is holey, ensure that to_kind is as well.
+    if (IsHoleyElementsKind(kind)) {
+      to_kind = GetHoleyElementsKind(to_kind);
+    }
+    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
+      if (FLAG_trace_track_allocation_sites) {
+        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
+               reinterpret_cast<void*>(this),
+               ElementsKindToString(kind),
+               ElementsKindToString(to_kind));
+      }
+      SetElementsKind(to_kind);
+      dependent_code()->DeoptimizeDependentCodeGroup(
+          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
+    }
+  }
+  return this;
+}
+
+
+void AllocationSite::AddDependentCompilationInfo(Reason reason,
+                                                 CompilationInfo* info) {
+  DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+  Handle<DependentCode> dep(dependent_code());
+  Handle<DependentCode> codes =
+      DependentCode::Insert(dep, group, info->object_wrapper());
+  if (*codes != dependent_code()) set_dependent_code(*codes);
+  info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
+}
+
+
+void AllocationSite::AddDependentCode(Reason reason, Handle<Code> code) {
+  DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+  Handle<DependentCode> codes = DependentCode::Insert(
+      Handle<DependentCode>(dependent_code()), group, code);
+  if (*codes != dependent_code()) set_dependent_code(*codes);
+}
+
+
+void JSObject::UpdateAllocationSite(Handle<JSObject> object,
+                                    ElementsKind to_kind) {
+  CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
+      object->UpdateAllocationSite(to_kind));
+}
+
+
 MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
   if (!FLAG_track_allocation_sites || !IsJSArray()) {
     return this;
@@ -12816,54 +12875,11 @@
 
   // Walk through to the Allocation Site
   AllocationSite* site = memento->GetAllocationSite();
-  if (site->SitePointsToLiteral() &&
-      site->transition_info()->IsJSArray()) {
-    JSArray* transition_info = JSArray::cast(site->transition_info());
-    ElementsKind kind = transition_info->GetElementsKind();
-    // if kind is holey ensure that to_kind is as well.
-    if (IsHoleyElementsKind(kind)) {
-      to_kind = GetHoleyElementsKind(to_kind);
-    }
-    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
-      // If the array is huge, it's not likely to be defined in a local
-      // function, so we shouldn't make new instances of it very often.
-      uint32_t length = 0;
-      CHECK(transition_info->length()->ToArrayIndex(&length));
-      if (length <= AllocationSite::kMaximumArrayBytesToPretransition) {
-        if (FLAG_trace_track_allocation_sites) {
-          bool is_nested = site->IsNestedSite();
-          PrintF(
-              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
-              reinterpret_cast<void*>(this),
-              is_nested ? "(nested)" : "",
-              ElementsKindToString(kind),
-              ElementsKindToString(to_kind));
-        }
-        return transition_info->TransitionElementsKind(to_kind);
-      }
-    }
-  } else {
-    ElementsKind kind = site->GetElementsKind();
-    // if kind is holey ensure that to_kind is as well.
-    if (IsHoleyElementsKind(kind)) {
-      to_kind = GetHoleyElementsKind(to_kind);
-    }
-    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
-      if (FLAG_trace_track_allocation_sites) {
-        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
-               reinterpret_cast<void*>(this),
-               ElementsKindToString(kind),
-               ElementsKindToString(to_kind));
-      }
-      site->set_transition_info(Smi::FromInt(to_kind));
-    }
-  }
-  return this;
+  return site->DigestTransitionFeedback(to_kind);
 }
 
 
 MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
-  ASSERT(!map()->is_observed());
   ElementsKind from_kind = map()->elements_kind();
 
   if (IsFastHoleyElementsKind(from_kind)) {
@@ -12951,6 +12967,14 @@
 }
 
 
+void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
+                                           uint32_t index,
+                                           Handle<Object> value) {
+  CALL_HEAP_FUNCTION_VOID(array->GetIsolate(),
+                          array->JSArrayUpdateLengthFromIndex(index, *value));
+}
+
+
 MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
                                                    Object* value) {
   uint32_t old_len = 0;
@@ -13056,8 +13080,7 @@
       }
       break;
     case DICTIONARY_ELEMENTS: {
-      SeededNumberDictionary* dictionary =
-          SeededNumberDictionary::cast(FixedArray::cast(elements()));
+      SeededNumberDictionary* dictionary = element_dictionary();
       *capacity = dictionary->Capacity();
       *used = dictionary->NumberOfElements();
       break;
@@ -13156,8 +13179,7 @@
   *has_smi_only_elements = false;
   if (FLAG_unbox_double_arrays) {
     ASSERT(HasDictionaryElements());
-    SeededNumberDictionary* dictionary =
-        SeededNumberDictionary::cast(elements());
+    SeededNumberDictionary* dictionary = element_dictionary();
     bool found_double = false;
     for (int i = 0; i < dictionary->Capacity(); i++) {
       Object* key = dictionary->KeyAt(i);
@@ -14535,7 +14557,7 @@
     return handle(Smi::FromInt(
         ExternalArray::cast(object->elements())->length()), isolate);
   } else if (!object->HasFastDoubleElements()) {
-    JSObject::EnsureWritableFastElements(object);
+    EnsureWritableFastElements(object);
   }
   ASSERT(object->HasFastSmiOrObjectElements() ||
          object->HasFastDoubleElements());
@@ -14742,12 +14764,31 @@
 }
 
 
+Handle<Object> ExternalByteArray::SetValue(Handle<ExternalByteArray> array,
+                                           uint32_t index,
+                                           Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalByteArray::SetValue(uint32_t index, Object* value) {
   return ExternalArrayIntSetter<ExternalByteArray, int8_t>
       (GetHeap(), this, index, value);
 }
 
 
+Handle<Object> ExternalUnsignedByteArray::SetValue(
+    Handle<ExternalUnsignedByteArray> array,
+    uint32_t index,
+    Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalUnsignedByteArray::SetValue(uint32_t index,
                                                  Object* value) {
   return ExternalArrayIntSetter<ExternalUnsignedByteArray, uint8_t>
@@ -14755,6 +14796,16 @@
 }
 
 
+Handle<Object> ExternalShortArray::SetValue(
+    Handle<ExternalShortArray> array,
+    uint32_t index,
+    Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalShortArray::SetValue(uint32_t index,
                                           Object* value) {
   return ExternalArrayIntSetter<ExternalShortArray, int16_t>
@@ -14762,6 +14813,16 @@
 }
 
 
+Handle<Object> ExternalUnsignedShortArray::SetValue(
+    Handle<ExternalUnsignedShortArray> array,
+    uint32_t index,
+    Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalUnsignedShortArray::SetValue(uint32_t index,
                                                   Object* value) {
   return ExternalArrayIntSetter<ExternalUnsignedShortArray, uint16_t>
@@ -14769,12 +14830,31 @@
 }
 
 
+Handle<Object> ExternalIntArray::SetValue(Handle<ExternalIntArray> array,
+                                          uint32_t index,
+                                          Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalIntArray::SetValue(uint32_t index, Object* value) {
   return ExternalArrayIntSetter<ExternalIntArray, int32_t>
       (GetHeap(), this, index, value);
 }
 
 
+Handle<Object> ExternalUnsignedIntArray::SetValue(
+    Handle<ExternalUnsignedIntArray> array,
+    uint32_t index,
+    Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) {
   uint32_t cast_value = 0;
   Heap* heap = GetHeap();
@@ -14796,6 +14876,15 @@
 }
 
 
+Handle<Object> ExternalFloatArray::SetValue(Handle<ExternalFloatArray> array,
+                                            uint32_t index,
+                                            Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
   float cast_value = static_cast<float>(OS::nan_value());
   Heap* heap = GetHeap();
@@ -14817,6 +14906,15 @@
 }
 
 
+Handle<Object> ExternalDoubleArray::SetValue(Handle<ExternalDoubleArray> array,
+                                            uint32_t index,
+                                            Handle<Object> value) {
+  CALL_HEAP_FUNCTION(array->GetIsolate(),
+                     array->SetValue(index, *value),
+                     Object);
+}
+
+
 MaybeObject* ExternalDoubleArray::SetValue(uint32_t index, Object* value) {
   double double_value = OS::nan_value();
   Heap* heap = GetHeap();
@@ -15438,6 +15536,15 @@
   }
 }
 
+Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
+    Handle<SeededNumberDictionary> dictionary,
+    uint32_t key,
+    Handle<Object> value,
+    PropertyDetails details) {
+  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
+                     dictionary->AddNumberEntry(key, *value, details),
+                     SeededNumberDictionary);
+}
 
 MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
                                                     Object* value,
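
The Handle-based SetValue and AddNumberEntry entry points added above all follow the same handlification shape: a static wrapper dereferences its handles, calls the raw MaybeObject* method, and re-wraps the result (the real CALL_HEAP_FUNCTION macro additionally retries after a GC on allocation failure). A rough standalone sketch of that shape; the Handle, MaybeObject and SetValueRaw types below are deliberately simplified stand-ins, not the real V8 definitions:

#include <cassert>

// Deliberately simplified stand-ins: real V8 handles are GC-aware
// indirection cells, and CALL_HEAP_FUNCTION retries the raw call after a GC
// when the allocation-capable callee fails.
struct Object { int value; };

template <typename T>
struct Handle {
  T* location;
  T* operator->() const { return location; }
  T& operator*() const { return *location; }
  bool is_null() const { return location == nullptr; }
};

struct MaybeObject {
  Object* object;  // null means "allocation failed, retry after GC"
  bool ToObject(Object** out) { *out = object; return object != nullptr; }
};

struct ExternalByteArray {
  Object storage;
  // Raw-pointer API, playing the role of the MaybeObject* SetValue overload.
  MaybeObject SetValueRaw(unsigned index, Object* value) {
    (void)index;
    storage.value = value->value;
    return MaybeObject{&storage};
  }
  // Handlified wrapper, mirroring the new static SetValue entry points.
  static Handle<Object> SetValue(Handle<ExternalByteArray> array,
                                 unsigned index, Handle<Object> value) {
    Object* result = nullptr;
    MaybeObject maybe = array->SetValueRaw(index, &(*value));
    if (!maybe.ToObject(&result)) return Handle<Object>{nullptr};  // retry/GC
    return Handle<Object>{result};
  }
};

int main() {
  ExternalByteArray raw_array{};
  Object raw_value{5};
  Handle<ExternalByteArray> array{&raw_array};
  Handle<Object> value{&raw_value};
  Handle<Object> result = ExternalByteArray::SetValue(array, 3, value);
  assert(!result.is_null() && result->value == 5);
  return 0;
}
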
diff --git a/src/objects.h b/src/objects.h
index 2fc1d23..25c2210 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -1494,6 +1494,7 @@
 
   // Return the object's prototype (might be Heap::null_value()).
   Object* GetPrototype(Isolate* isolate);
+  Map* GetMarkerMap(Isolate* isolate);
 
   // Returns the permanent hash code associated with this object. May return
   // undefined if not yet created.
@@ -2338,24 +2339,10 @@
   AccessorPair* GetLocalPropertyAccessorPair(Name* name);
   AccessorPair* GetLocalElementAccessorPair(uint32_t index);
 
-  MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
-                                              Object* value,
-                                              StrictModeFlag strict_mode,
-                                              bool check_prototype);
-
-  MUST_USE_RESULT MaybeObject* SetDictionaryElement(
-      uint32_t index,
-      Object* value,
-      PropertyAttributes attributes,
-      StrictModeFlag strict_mode,
-      bool check_prototype,
-      SetPropertyMode set_mode = SET_PROPERTY);
-
-  MUST_USE_RESULT MaybeObject* SetFastDoubleElement(
-      uint32_t index,
-      Object* value,
-      StrictModeFlag strict_mode,
-      bool check_prototype = true);
+  static Handle<Object> SetFastElement(Handle<JSObject> object, uint32_t index,
+                                       Handle<Object> value,
+                                       StrictModeFlag strict_mode,
+                                       bool check_prototype);
 
   static Handle<Object> SetOwnElement(Handle<JSObject> object,
                                       uint32_t index,
@@ -2367,15 +2354,6 @@
       Handle<JSObject> object,
       uint32_t index,
       Handle<Object> value,
-      PropertyAttributes attr,
-      StrictModeFlag strict_mode,
-      bool check_prototype = true,
-      SetPropertyMode set_mode = SET_PROPERTY);
-
-  // A Failure object is returned if GC is needed.
-  MUST_USE_RESULT MaybeObject* SetElement(
-      uint32_t index,
-      Object* value,
       PropertyAttributes attributes,
       StrictModeFlag strict_mode,
       bool check_prototype = true,
@@ -2392,6 +2370,11 @@
     kDontAllowSmiElements
   };
 
+  static Handle<FixedArray> SetFastElementsCapacityAndLength(
+      Handle<JSObject> object,
+      int capacity,
+      int length,
+      SetFastElementsCapacitySmiMode smi_mode);
   // Replace the elements' backing store with fast elements of the given
   // capacity.  Update the length for JSArrays.  Returns the new backing
   // store.
@@ -2399,6 +2382,10 @@
       int capacity,
       int length,
       SetFastElementsCapacitySmiMode smi_mode);
+  static void SetFastDoubleElementsCapacityAndLength(
+      Handle<JSObject> object,
+      int capacity,
+      int length);
   MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength(
       int capacity,
       int length);
@@ -2468,7 +2455,6 @@
                                      ElementsKind to_kind);
 
   MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind);
-  MUST_USE_RESULT MaybeObject* UpdateAllocationSite(ElementsKind to_kind);
 
   // TODO(mstarzinger): Both public because of ConvertAnsSetLocalProperty().
   static void MigrateToMap(Handle<JSObject> object, Handle<Map> new_map);
@@ -2659,6 +2645,10 @@
   friend class JSReceiver;
   friend class Object;
 
+  static void UpdateAllocationSite(Handle<JSObject> object,
+                                   ElementsKind to_kind);
+  MUST_USE_RESULT MaybeObject* UpdateAllocationSite(ElementsKind to_kind);
+
   // Used from Object::GetProperty().
   static Handle<Object> GetPropertyWithFailedAccessCheck(
       Handle<JSObject> object,
@@ -2686,25 +2676,42 @@
       Handle<Object> value,
       Handle<JSObject> holder,
       StrictModeFlag strict_mode);
-  MUST_USE_RESULT MaybeObject* SetElementWithInterceptor(
+  static Handle<Object> SetElementWithInterceptor(
+      Handle<JSObject> object,
       uint32_t index,
-      Object* value,
+      Handle<Object> value,
       PropertyAttributes attributes,
       StrictModeFlag strict_mode,
       bool check_prototype,
       SetPropertyMode set_mode);
-  MUST_USE_RESULT MaybeObject* SetElementWithoutInterceptor(
+  static Handle<Object> SetElementWithoutInterceptor(
+      Handle<JSObject> object,
       uint32_t index,
-      Object* value,
+      Handle<Object> value,
       PropertyAttributes attributes,
       StrictModeFlag strict_mode,
       bool check_prototype,
       SetPropertyMode set_mode);
-  MUST_USE_RESULT MaybeObject* SetElementWithCallbackSetterInPrototypes(
+  static Handle<Object> SetElementWithCallbackSetterInPrototypes(
+      Handle<JSObject> object,
       uint32_t index,
-      Object* value,
+      Handle<Object> value,
       bool* found,
       StrictModeFlag strict_mode);
+  static Handle<Object> SetDictionaryElement(
+      Handle<JSObject> object,
+      uint32_t index,
+      Handle<Object> value,
+      PropertyAttributes attributes,
+      StrictModeFlag strict_mode,
+      bool check_prototype,
+      SetPropertyMode set_mode = SET_PROPERTY);
+  static Handle<Object> SetFastDoubleElement(
+      Handle<JSObject> object,
+      uint32_t index,
+      Handle<Object> value,
+      StrictModeFlag strict_mode,
+      bool check_prototype = true);
 
   // Searches the prototype chain for property 'name'. If it is found and
   // has a setter, invoke it and set '*done' to true. If it is found and is
@@ -3966,6 +3973,11 @@
 
   // Type specific at put (default NONE attributes is used when adding).
   MUST_USE_RESULT MaybeObject* AtNumberPut(uint32_t key, Object* value);
+  MUST_USE_RESULT static Handle<SeededNumberDictionary> AddNumberEntry(
+      Handle<SeededNumberDictionary> dictionary,
+      uint32_t key,
+      Handle<Object> value,
+      PropertyDetails details);
   MUST_USE_RESULT MaybeObject* AddNumberEntry(uint32_t key,
                                               Object* value,
                                               PropertyDetails details);
@@ -4590,6 +4602,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, int8_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalByteArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4613,6 +4629,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, uint8_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalUnsignedByteArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4636,6 +4656,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, int16_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalShortArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4659,6 +4683,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, uint16_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalUnsignedShortArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4682,6 +4710,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, int32_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalIntArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4705,6 +4737,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, uint32_t value);
 
+  static Handle<Object> SetValue(Handle<ExternalUnsignedIntArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4728,6 +4764,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, float value);
 
+  static Handle<Object> SetValue(Handle<ExternalFloatArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -4751,6 +4791,10 @@
   MUST_USE_RESULT inline MaybeObject* get(int index);
   inline void set(int index, double value);
 
+  static Handle<Object> SetValue(Handle<ExternalDoubleArray> array,
+                                 uint32_t index,
+                                 Handle<Object> value);
+
   // This accessor applies the correct conversion from Smi, HeapNumber
   // and undefined.
   MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
@@ -5253,9 +5297,9 @@
   static inline Flags ComputeMonomorphicFlags(
       Kind kind,
       ExtraICState extra_ic_state = kNoExtraICState,
+      InlineCacheHolderFlag holder = OWN_MAP,
       StubType type = NORMAL,
-      int argc = -1,
-      InlineCacheHolderFlag holder = OWN_MAP);
+      int argc = -1);
 
   static inline InlineCacheState ExtractICStateFromFlags(Flags flags);
   static inline StubType ExtractTypeFromFlags(Flags flags);
@@ -5565,7 +5609,13 @@
     // Group of code that depends on global property values in property cells
     // not being changed.
     kPropertyCellChangedGroup,
-    kGroupCount = kPropertyCellChangedGroup + 1
+    // Group of code that depends on tenuring information in AllocationSites
+    // not being changed.
+    kAllocationSiteTenuringChangedGroup,
+    // Group of code that depends on element transition information in
+    // AllocationSites not being changed.
+    kAllocationSiteTransitionChangedGroup,
+    kGroupCount = kAllocationSiteTransitionChangedGroup + 1
   };
 
   // Array for holding the index of the first code object of each group.
@@ -5665,7 +5715,7 @@
   class FunctionWithPrototype:      public BitField<bool, 23,  1> {};
   class DictionaryMap:              public BitField<bool, 24,  1> {};
   class OwnsDescriptors:            public BitField<bool, 25,  1> {};
-  class IsObserved:                 public BitField<bool, 26,  1> {};
+  class HasInstanceCallHandler:     public BitField<bool, 26,  1> {};
   class Deprecated:                 public BitField<bool, 27,  1> {};
   class IsFrozen:                   public BitField<bool, 28,  1> {};
   class IsUnstable:                 public BitField<bool, 29,  1> {};
@@ -5728,12 +5778,12 @@
   }
 
-  // Tells whether the instance has a call-as-function handler.
+  // Tells whether instances of this map are observed.
-  inline void set_has_instance_call_handler() {
-    set_bit_field(bit_field() | (1 << kHasInstanceCallHandler));
+  inline void set_is_observed() {
+    set_bit_field(bit_field() | (1 << kIsObserved));
   }
 
-  inline bool has_instance_call_handler() {
-    return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
+  inline bool is_observed() {
+    return ((1 << kIsObserved) & bit_field()) != 0;
   }
 
   inline void set_is_extensible(bool value);
@@ -5742,10 +5792,6 @@
   inline void set_elements_kind(ElementsKind elements_kind) {
     ASSERT(elements_kind < kElementsKindCount);
     ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount));
-    ASSERT(!is_observed() ||
-           elements_kind == DICTIONARY_ELEMENTS ||
-           elements_kind == NON_STRICT_ARGUMENTS_ELEMENTS ||
-           IsExternalArrayElementsKind(elements_kind));
     set_bit_field2((bit_field2() & ~kElementsKindMask) |
         (elements_kind << kElementsKindShift));
     ASSERT(this->elements_kind() == elements_kind);
@@ -5998,8 +6044,8 @@
 
   inline bool owns_descriptors();
   inline void set_owns_descriptors(bool is_shared);
-  inline bool is_observed();
-  inline void set_is_observed(bool is_observed);
+  inline bool has_instance_call_handler();
+  inline void set_has_instance_call_handler();
   inline void freeze();
   inline bool is_frozen();
   inline void mark_unstable();
@@ -6258,7 +6304,7 @@
   static const int kHasNamedInterceptor = 3;
   static const int kHasIndexedInterceptor = 4;
   static const int kIsUndetectable = 5;
-  static const int kHasInstanceCallHandler = 6;
+  static const int kIsObserved = 6;
   static const int kIsAccessCheckNeeded = 7;
 
   // Bit positions for bit field 2
@@ -8087,13 +8133,31 @@
   // This method is expensive, it should only be called for reporting.
   bool IsNestedSite();
 
+  class ElementsKindBits:       public BitField<ElementsKind, 0,  15> {};
+  class UnusedBits:             public BitField<int,          15, 14> {};
+  class DoNotInlineBit:         public BitField<bool,         29,  1> {};
+
   ElementsKind GetElementsKind() {
     ASSERT(!SitePointsToLiteral());
-    return static_cast<ElementsKind>(Smi::cast(transition_info())->value());
+    int value = Smi::cast(transition_info())->value();
+    return ElementsKindBits::decode(value);
   }
 
   void SetElementsKind(ElementsKind kind) {
-    set_transition_info(Smi::FromInt(static_cast<int>(kind)));
+    int value = Smi::cast(transition_info())->value();
+    set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
+                        SKIP_WRITE_BARRIER);
+  }
+
+  bool CanInlineCall() {
+    int value = Smi::cast(transition_info())->value();
+    return DoNotInlineBit::decode(value) == 0;
+  }
+
+  void SetDoNotInlineCall() {
+    int value = Smi::cast(transition_info())->value();
+    set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
+                        SKIP_WRITE_BARRIER);
   }
 
   bool SitePointsToLiteral() {
@@ -8103,6 +8167,16 @@
     return transition_info()->IsJSArray() || transition_info()->IsJSObject();
   }
 
+  MaybeObject* DigestTransitionFeedback(ElementsKind to_kind);
+
+  enum Reason {
+    TENURING,
+    TRANSITIONS
+  };
+
+  void AddDependentCompilationInfo(Reason reason, CompilationInfo* info);
+  void AddDependentCode(Reason reason, Handle<Code> code);
+
   DECLARE_PRINTER(AllocationSite)
   DECLARE_VERIFIER(AllocationSite)
 
@@ -8123,6 +8197,7 @@
                               kSize> BodyDescriptor;
 
  private:
+  inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
   DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
 };
 
@@ -9716,6 +9791,10 @@
   // is set to a smi. This matches the set function on FixedArray.
   inline void set_length(Smi* length);
 
+  static void JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
+                                           uint32_t index,
+                                           Handle<Object> value);
+
   MUST_USE_RESULT MaybeObject* JSArrayUpdateLengthFromIndex(uint32_t index,
                                                             Object* value);
 
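The new AllocationSite fields above pack two pieces of feedback into the single Smi held in transition_info: the elements kind in bits 0..14 and a do-not-inline flag in bit 29, with SetElementsKind and SetDoNotInlineCall each updating their own bits without disturbing the other. A small standalone sketch of the same encode/decode scheme; the BitField helper below is a simplified stand-in and only two elements kinds are modelled:

#include <cassert>
#include <cstdio>

// Simplified stand-in for V8's BitField<T, shift, size> helper.
template <typename T, int shift, int size>
struct BitField {
  static const int kMask = ((1 << size) - 1) << shift;
  static int encode(T value) { return static_cast<int>(value) << shift; }
  static T decode(int word) {
    return static_cast<T>((word & kMask) >> shift);
  }
  static int update(int word, T value) {
    return (word & ~kMask) | encode(value);
  }
};

enum ElementsKind { FAST_SMI_ELEMENTS = 0, FAST_HOLEY_ELEMENTS = 3 };

// Field layout matching the declarations in the diff: kind in bits 0..14,
// the do-not-inline flag in bit 29.
typedef BitField<ElementsKind, 0, 15> ElementsKindBits;
typedef BitField<bool, 29, 1> DoNotInlineBit;

int main() {
  int transition_info = 0;  // plays the role of the Smi payload
  transition_info = ElementsKindBits::update(transition_info,
                                             FAST_HOLEY_ELEMENTS);
  assert(ElementsKindBits::decode(transition_info) == FAST_HOLEY_ELEMENTS);
  assert(!DoNotInlineBit::decode(transition_info));  // can still inline

  transition_info = DoNotInlineBit::update(transition_info, true);
  assert(DoNotInlineBit::decode(transition_info));
  // Setting the flag does not disturb the elements kind bits.
  assert(ElementsKindBits::decode(transition_info) == FAST_HOLEY_ELEMENTS);
  std::printf("transition_info = 0x%x\n", transition_info);
}
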
diff --git a/src/runtime.cc b/src/runtime.cc
index 80b6827..2cf033c 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -9821,22 +9821,22 @@
 // array.  Returns true if the element was pushed on the stack and
 // false otherwise.
 RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) {
-  SealHandleScope shs(isolate);
+  HandleScope scope(isolate);
   ASSERT(args.length() == 2);
-  CONVERT_ARG_CHECKED(JSArray, array, 0);
-  CONVERT_ARG_CHECKED(JSReceiver, element, 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0);
+  CONVERT_ARG_HANDLE_CHECKED(JSReceiver, element, 1);
   RUNTIME_ASSERT(array->HasFastSmiOrObjectElements());
   int length = Smi::cast(array->length())->value();
   FixedArray* elements = FixedArray::cast(array->elements());
   for (int i = 0; i < length; i++) {
-    if (elements->get(i) == element) return isolate->heap()->false_value();
+    if (elements->get(i) == *element) return isolate->heap()->false_value();
   }
-  Object* obj;
+
   // Strict not needed. Used for cycle detection in Array join implementation.
-  { MaybeObject* maybe_obj =
-        array->SetFastElement(length, element, kNonStrictMode, true);
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
+  RETURN_IF_EMPTY_HANDLE(isolate, JSObject::SetFastElement(array, length,
+                                                           element,
+                                                           kNonStrictMode,
+                                                           true));
   return isolate->heap()->true_value();
 }
 
@@ -14688,7 +14688,7 @@
 
 static MaybeObject* ArrayConstructorCommon(Isolate* isolate,
                                            Handle<JSFunction> constructor,
-                                           Handle<Object> type_info,
+                                           Handle<AllocationSite> site,
                                            Arguments* caller_args) {
   bool holey = false;
   bool can_use_type_feedback = true;
@@ -14710,14 +14710,7 @@
 
   JSArray* array;
   MaybeObject* maybe_array;
-  if (!type_info.is_null() &&
-      *type_info != isolate->heap()->undefined_value() &&
-      Cell::cast(*type_info)->value()->IsAllocationSite() &&
-      can_use_type_feedback) {
-    Handle<Cell> cell = Handle<Cell>::cast(type_info);
-    Handle<AllocationSite> site = Handle<AllocationSite>(
-        AllocationSite::cast(cell->value()), isolate);
-    ASSERT(!site->SitePointsToLiteral());
+  if (!site.is_null() && can_use_type_feedback) {
     ElementsKind to_kind = site->GetElementsKind();
     if (holey && !IsFastHoleyElementsKind(to_kind)) {
       to_kind = GetHoleyElementsKind(to_kind);
@@ -14743,8 +14736,17 @@
   maybe_array = isolate->heap()->AllocateJSArrayStorage(array, 0, 0,
       DONT_INITIALIZE_ARRAY_ELEMENTS);
   if (maybe_array->IsFailure()) return maybe_array;
+  ElementsKind old_kind = array->GetElementsKind();
   maybe_array = ArrayConstructInitializeElements(array, caller_args);
   if (maybe_array->IsFailure()) return maybe_array;
+  if (!site.is_null() &&
+      (old_kind != array->GetElementsKind() ||
+       !can_use_type_feedback)) {
+    // The arguments passed in caused a transition. This kind of complexity
+    // can't be dealt with in the inlined hydrogen array constructor case.
+    // We must mark the allocationsite as un-inlinable.
+    site->SetDoNotInlineCall();
+  }
   return array;
 }
 
@@ -14771,9 +14773,19 @@
     ASSERT(arg_count == caller_args->length());
   }
 #endif
+
+  Handle<AllocationSite> site;
+  if (!type_info.is_null() &&
+      *type_info != isolate->heap()->undefined_value() &&
+      Cell::cast(*type_info)->value()->IsAllocationSite()) {
+    site = Handle<AllocationSite>(
+        AllocationSite::cast(Cell::cast(*type_info)->value()), isolate);
+    ASSERT(!site->SitePointsToLiteral());
+  }
+
   return ArrayConstructorCommon(isolate,
                                 constructor,
-                                type_info,
+                                site,
                                 caller_args);
 }
 
@@ -14796,7 +14808,7 @@
 #endif
   return ArrayConstructorCommon(isolate,
                                 constructor,
-                                Handle<Object>::null(),
+                                Handle<AllocationSite>::null(),
                                 caller_args);
 }
 
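ArrayConstructorCommon above, like AllocationSite::DigestTransitionFeedback earlier in this patch, first forces the requested kind to its holey variant when the current kind is already holey, and only transitions when the result is strictly more general. A rough standalone sketch of that ordering over a deliberately tiny kind lattice (the enum and its ordering are illustrative, not V8's full set of elements kinds):

#include <cassert>

// A tiny, illustrative lattice: SMI < OBJECT, and each kind has a holey
// variant that is more general than its packed form.
enum ElementsKind {
  FAST_SMI_ELEMENTS,
  FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS,
  FAST_HOLEY_ELEMENTS
};

bool IsHoleyElementsKind(ElementsKind kind) {
  return kind == FAST_HOLEY_SMI_ELEMENTS || kind == FAST_HOLEY_ELEMENTS;
}

ElementsKind GetHoleyElementsKind(ElementsKind kind) {
  if (kind == FAST_SMI_ELEMENTS) return FAST_HOLEY_SMI_ELEMENTS;
  if (kind == FAST_ELEMENTS) return FAST_HOLEY_ELEMENTS;
  return kind;  // already holey
}

// In this simplified lattice a larger enum value means "more general".
bool IsMoreGeneralElementsKindTransition(ElementsKind from, ElementsKind to) {
  return to > from;
}

// Mirrors the shape of the feedback digestion: holey-ness is sticky, and a
// transition is recorded only if it actually generalizes the stored kind.
ElementsKind Digest(ElementsKind stored, ElementsKind to_kind) {
  if (IsHoleyElementsKind(stored)) to_kind = GetHoleyElementsKind(to_kind);
  if (IsMoreGeneralElementsKindTransition(stored, to_kind)) return to_kind;
  return stored;
}

int main() {
  // A holey-smi site fed a packed-object transition ends up holey-object.
  assert(Digest(FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS) ==
         FAST_HOLEY_ELEMENTS);
  // A less general target leaves the site unchanged.
  assert(Digest(FAST_HOLEY_ELEMENTS, FAST_SMI_ELEMENTS) ==
         FAST_HOLEY_ELEMENTS);
}
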
diff --git a/src/spaces.cc b/src/spaces.cc
index 1e0d9bc..ee19a02 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1350,7 +1350,6 @@
       }
     }
   }
-  allocation_info_.set_limit(to_space_.page_high());
   ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
 }
 
@@ -1359,14 +1358,7 @@
   MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
   allocation_info_.set_top(to_space_.page_low());
   allocation_info_.set_limit(to_space_.page_high());
-
-  // Lower limit during incremental marking.
-  if (heap()->incremental_marking()->IsMarking() &&
-      inline_allocation_limit_step() != 0) {
-    Address new_limit =
-        allocation_info_.top() + inline_allocation_limit_step();
-    allocation_info_.set_limit(Min(new_limit, allocation_info_.limit()));
-  }
+  UpdateInlineAllocationLimit(0);
   ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
 }
 
@@ -1383,6 +1375,26 @@
 }
 
 
+void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {
+  if (heap()->inline_allocation_disabled()) {
+    // Lowest limit when linear allocation is disabled.
+    Address high = to_space_.page_high();
+    Address new_top = allocation_info_.top() + size_in_bytes;
+    allocation_info_.set_limit(Min(new_top, high));
+  } else if (inline_allocation_limit_step() == 0) {
+    // Normal limit is the end of the current page.
+    allocation_info_.set_limit(to_space_.page_high());
+  } else {
+    // Lower limit during incremental marking.
+    Address high = to_space_.page_high();
+    Address new_top = allocation_info_.top() + size_in_bytes;
+    Address new_limit = new_top + inline_allocation_limit_step_;
+    allocation_info_.set_limit(Min(new_limit, high));
+  }
+  ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
+}
+
+
 bool NewSpace::AddFreshPage() {
   Address top = allocation_info_.top();
   if (NewSpacePage::IsAtStart(top)) {
@@ -1417,18 +1429,16 @@
 
 MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
   Address old_top = allocation_info_.top();
-  Address new_top = old_top + size_in_bytes;
   Address high = to_space_.page_high();
   if (allocation_info_.limit() < high) {
-    // Incremental marking has lowered the limit to get a
-    // chance to do a step.
-    Address new_limit = Min(
-        allocation_info_.limit() + inline_allocation_limit_step_,
-        high);
-    allocation_info_.set_limit(new_limit);
+    // The limit has been lowered either because linear allocation was
+    // disabled or because incremental marking wants a chance to do a step.
+    // Set the new limit accordingly.
+    Address new_top = old_top + size_in_bytes;
     int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
     heap()->incremental_marking()->Step(
         bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD);
+    UpdateInlineAllocationLimit(size_in_bytes);
     top_on_previous_step_ = new_top;
     return AllocateRaw(size_in_bytes);
   } else if (AddFreshPage()) {
@@ -2374,7 +2384,7 @@
   int new_node_size = 0;
   FreeListNode* new_node = FindNodeFor(size_in_bytes, &new_node_size);
   if (new_node == NULL) {
-    owner_->SetTop(NULL, NULL);
+    owner_->SetTopAndLimit(NULL, NULL);
     return NULL;
   }
 
@@ -2399,26 +2409,31 @@
   // a little of this again immediately - see below.
   owner_->Allocate(new_node_size);
 
-  if (bytes_left > kThreshold &&
-      owner_->heap()->incremental_marking()->IsMarkingIncomplete() &&
-      FLAG_incremental_marking_steps) {
+  if (owner_->heap()->inline_allocation_disabled()) {
+    // Keep the linear allocation area empty if requested to do so; just
+    // return the area to the free list instead.
+    owner_->Free(new_node->address() + size_in_bytes, bytes_left);
+    ASSERT(owner_->top() == NULL && owner_->limit() == NULL);
+  } else if (bytes_left > kThreshold &&
+             owner_->heap()->incremental_marking()->IsMarkingIncomplete() &&
+             FLAG_incremental_marking_steps) {
     int linear_size = owner_->RoundSizeDownToObjectAlignment(kThreshold);
     // We don't want to give too large linear areas to the allocator while
     // incremental marking is going on, because we won't check again whether
     // we want to do another increment until the linear area is used up.
     owner_->Free(new_node->address() + size_in_bytes + linear_size,
                  new_node_size - size_in_bytes - linear_size);
-    owner_->SetTop(new_node->address() + size_in_bytes,
-                   new_node->address() + size_in_bytes + linear_size);
+    owner_->SetTopAndLimit(new_node->address() + size_in_bytes,
+                           new_node->address() + size_in_bytes + linear_size);
   } else if (bytes_left > 0) {
     // Normally we give the rest of the node to the allocator as its new
     // linear allocation area.
-    owner_->SetTop(new_node->address() + size_in_bytes,
-                   new_node->address() + new_node_size);
+    owner_->SetTopAndLimit(new_node->address() + size_in_bytes,
+                           new_node->address() + new_node_size);
   } else {
     // TODO(gc) Try not freeing linear allocation region when bytes_left
     // are zero.
-    owner_->SetTop(NULL, NULL);
+    owner_->SetTopAndLimit(NULL, NULL);
   }
 
   return new_node;
@@ -2507,11 +2522,7 @@
 void PagedSpace::PrepareForMarkCompact() {
   // We don't have a linear allocation area while sweeping.  It will be restored
   // on the first allocation after the sweep.
-  // Mark the old linear allocation area with a free space map so it can be
-  // skipped when scanning the heap.
-  int old_linear_size = static_cast<int>(limit() - top());
-  Free(top(), old_linear_size);
-  SetTop(NULL, NULL);
+  EmptyAllocationInfo();
 
   // Stop lazy sweeping and clear marking bits for unswept pages.
   if (first_unswept_page_ != NULL) {
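
NewSpace::UpdateInlineAllocationLimit above chooses the linear-allocation limit from one of three policies: keep it just large enough for the current request when inline allocation is disabled, use the page end when no limit step is configured, and otherwise leave roughly one step of headroom so incremental marking regains control via SlowAllocateRaw. A standalone arithmetic sketch of that choice over plain integer addresses (the addresses and sizes in main are made up for illustration):

#include <algorithm>
#include <cassert>
#include <cstdint>

// Pick the new allocation limit under the three policies described above.
uintptr_t ComputeLimit(uintptr_t top, uintptr_t page_high, int size_in_bytes,
                       bool inline_allocation_disabled, intptr_t limit_step) {
  if (inline_allocation_disabled) {
    // Lowest possible limit: just enough room for the current request.
    return std::min<uintptr_t>(top + size_in_bytes, page_high);
  }
  if (limit_step == 0) {
    // Normal limit: the end of the current page.
    return page_high;
  }
  // Incremental marking: allow the request plus one limit step, capped at
  // the page end, so allocation traps back into the slow path regularly.
  return std::min<uintptr_t>(top + size_in_bytes + limit_step, page_high);
}

int main() {
  const uintptr_t top = 0x1000, high = 0x2000;  // hypothetical addresses
  assert(ComputeLimit(top, high, 0x40, true, 0) == 0x1040);
  assert(ComputeLimit(top, high, 0x40, false, 0) == 0x2000);
  assert(ComputeLimit(top, high, 0x40, false, 0x100) == 0x1140);
  // A very large step is still capped at the page end.
  assert(ComputeLimit(top, high, 0x40, false, 0x4000) == 0x2000);
}
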
diff --git a/src/spaces.h b/src/spaces.h
index 83793e3..ee11b6b 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -1783,7 +1783,7 @@
   }
 
   // Set space allocation info.
-  void SetTop(Address top, Address limit) {
+  void SetTopAndLimit(Address top, Address limit) {
     ASSERT(top == limit ||
            Page::FromAddress(top) == Page::FromAddress(limit - 1));
     MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
@@ -1791,6 +1791,15 @@
     allocation_info_.set_limit(limit);
   }
 
+  // Empty space allocation info, returning unused area to free list.
+  void EmptyAllocationInfo() {
+    // Mark the old linear allocation area with a free space map so it can be
+    // skipped when scanning the heap.
+    int old_linear_size = static_cast<int>(limit() - top());
+    Free(top(), old_linear_size);
+    SetTopAndLimit(NULL, NULL);
+  }
+
   void Allocate(int bytes) {
     accounting_stats_.AllocateBytes(bytes);
   }
@@ -2478,16 +2487,10 @@
   // Reset the allocation pointer to the beginning of the active semispace.
   void ResetAllocationInfo();
 
+  void UpdateInlineAllocationLimit(int size_in_bytes);
   void LowerInlineAllocationLimit(intptr_t step) {
     inline_allocation_limit_step_ = step;
-    if (step == 0) {
-      allocation_info_.set_limit(to_space_.page_high());
-    } else {
-      Address new_limit = Min(
-          allocation_info_.top() + inline_allocation_limit_step_,
-          allocation_info_.limit());
-      allocation_info_.set_limit(new_limit);
-    }
+    UpdateInlineAllocationLimit(0);
     top_on_previous_step_ = allocation_info_.top();
   }
 
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 1bc4b13..7a11c23 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -102,8 +102,10 @@
 Handle<Code> StubCache::FindIC(Handle<Name> name,
                                Handle<Map> stub_holder_map,
                                Code::Kind kind,
-                               Code::ExtraICState extra_state) {
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind, extra_state);
+                               Code::ExtraICState extra_state,
+                               InlineCacheHolderFlag cache_holder) {
+  Code::Flags flags = Code::ComputeMonomorphicFlags(
+      kind, extra_state, cache_holder);
   Handle<Object> probe(stub_holder_map->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -111,17 +113,10 @@
 }
 
 
-Handle<Code> StubCache::FindIC(Handle<Name> name,
-                               Handle<JSObject> stub_holder,
-                               Code::Kind kind,
-                               Code::ExtraICState extra_ic_state) {
-  return FindIC(name, Handle<Map>(stub_holder->map()), kind, extra_ic_state);
-}
-
-
 Handle<Code> StubCache::FindHandler(Handle<Name> name,
-                                    Handle<JSObject> receiver,
+                                    Handle<HeapObject> stub_holder,
                                     Code::Kind kind,
+                                    InlineCacheHolderFlag cache_holder,
                                     StrictModeFlag strict_mode) {
   Code::ExtraICState extra_ic_state = Code::kNoExtraICState;
   if (kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC) {
@@ -129,28 +124,36 @@
         STANDARD_STORE, strict_mode);
   }
   Code::Flags flags = Code::ComputeMonomorphicFlags(
-      Code::HANDLER, extra_ic_state, Code::NORMAL, kind);
-  Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags),
+      Code::HANDLER, extra_ic_state, cache_holder, Code::NORMAL, kind);
+
+  Handle<Object> probe(stub_holder->map()->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
   return Handle<Code>::null();
 }
 
 
-Handle<Code> StubCache::ComputeMonomorphicIC(Handle<HeapObject> receiver,
+Handle<Code> StubCache::ComputeMonomorphicIC(Handle<Name> name,
+                                             Handle<Object> object,
                                              Handle<Code> handler,
-                                             Handle<Name> name,
                                              StrictModeFlag strict_mode) {
   Code::Kind kind = handler->handler_kind();
-  Handle<Map> map(receiver->map());
-  Handle<Code> ic = FindIC(name, map, kind, strict_mode);
+  // Use the same cache holder for the IC as for the handler.
+  InlineCacheHolderFlag cache_holder =
+      Code::ExtractCacheHolderFromFlags(handler->flags());
+  Handle<HeapObject> stub_holder(IC::GetCodeCacheHolder(
+      isolate(), *object, cache_holder));
+  Handle<Map> stub_holder_map(stub_holder->map());
+  Handle<Code> ic = FindIC(
+      name, stub_holder_map, kind, strict_mode, cache_holder);
   if (!ic.is_null()) return ic;
 
+  Handle<Map> map(object->GetMarkerMap(isolate()));
   if (kind == Code::LOAD_IC) {
-    LoadStubCompiler ic_compiler(isolate());
+    LoadStubCompiler ic_compiler(isolate(), cache_holder);
     ic = ic_compiler.CompileMonomorphicIC(map, handler, name);
   } else if (kind == Code::KEYED_LOAD_IC) {
-    KeyedLoadStubCompiler ic_compiler(isolate());
+    KeyedLoadStubCompiler ic_compiler(isolate(), cache_holder);
     ic = ic_compiler.CompileMonomorphicIC(map, handler, name);
   } else if (kind == Code::STORE_IC) {
     StoreStubCompiler ic_compiler(isolate(), strict_mode);
@@ -161,13 +164,16 @@
     ic = ic_compiler.CompileMonomorphicIC(map, handler, name);
   }
 
-  HeapObject::UpdateMapCodeCache(receiver, name, ic);
+  HeapObject::UpdateMapCodeCache(stub_holder, name, ic);
   return ic;
 }
 
 
 Handle<Code> StubCache::ComputeLoadNonexistent(Handle<Name> name,
-                                               Handle<JSObject> receiver) {
+                                               Handle<Object> object) {
+  InlineCacheHolderFlag cache_holder = IC::GetCodeCacheForObject(*object);
+  Handle<HeapObject> stub_holder(IC::GetCodeCacheHolder(
+      isolate(), *object, cache_holder));
   // If no global objects are present in the prototype chain, the load
   // nonexistent IC stub can be shared for all names for a given map
   // and we use the empty string for the map cache in that case.  If
@@ -176,7 +182,7 @@
   // specific to the name.
   Handle<Name> cache_name = factory()->empty_string();
   Handle<JSObject> current;
-  Handle<Object> next = receiver;
+  Handle<Object> next = stub_holder;
   Handle<JSGlobalObject> global;
   do {
     current = Handle<JSObject>::cast(next);
@@ -191,13 +197,14 @@
 
   // Compile the stub that is either shared for all names or
   // name specific if there are global objects involved.
-  Handle<Code> handler = FindHandler(cache_name, receiver, Code::LOAD_IC);
+  Handle<Code> handler = FindHandler(
+      cache_name, stub_holder, Code::LOAD_IC, cache_holder);
   if (!handler.is_null()) return handler;
 
-  LoadStubCompiler compiler(isolate_);
+  LoadStubCompiler compiler(isolate_, cache_holder);
   handler =
-      compiler.CompileLoadNonexistent(receiver, current, cache_name, global);
-  HeapObject::UpdateMapCodeCache(receiver, cache_name, handler);
+      compiler.CompileLoadNonexistent(object, current, cache_name, global);
+  HeapObject::UpdateMapCodeCache(stub_holder, cache_name, handler);
   return handler;
 }
 
@@ -257,9 +264,8 @@
                                             Handle<JSObject> holder,
                                             Handle<JSFunction> function) {
   // Compute the check type and the map.
-  InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(*object, *holder);
-  Handle<JSObject> stub_holder(IC::GetCodeCacheHolder(
+  InlineCacheHolderFlag cache_holder = IC::GetCodeCacheForObject(*object);
+  Handle<HeapObject> stub_holder(IC::GetCodeCacheHolder(
       isolate_, *object, cache_holder));
 
   // Compute check type based on receiver/holder.
@@ -283,7 +289,7 @@
   }
 
   Code::Flags flags = Code::ComputeMonomorphicFlags(
-      kind, extra_state, Code::CONSTANT, argc, cache_holder);
+      kind, extra_state, cache_holder, Code::CONSTANT, argc);
   Handle<Object> probe(stub_holder->map()->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -312,9 +318,8 @@
                                          Handle<JSObject> holder,
                                          PropertyIndex index) {
   // Compute the check type and the map.
-  InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(*object, *holder);
-  Handle<JSObject> stub_holder(IC::GetCodeCacheHolder(
+  InlineCacheHolderFlag cache_holder = IC::GetCodeCacheForObject(*object);
+  Handle<HeapObject> stub_holder(IC::GetCodeCacheHolder(
       isolate_, *object, cache_holder));
 
   // TODO(1233596): We cannot do receiver map check for non-JS objects
@@ -326,7 +331,7 @@
   }
 
   Code::Flags flags = Code::ComputeMonomorphicFlags(
-      kind, extra_state, Code::FIELD, argc, cache_holder);
+      kind, extra_state, cache_holder, Code::FIELD, argc);
   Handle<Object> probe(stub_holder->map()->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -351,9 +356,8 @@
                                                Handle<Object> object,
                                                Handle<JSObject> holder) {
   // Compute the check type and the map.
-  InlineCacheHolderFlag cache_holder =
-      IC::GetCodeCacheForObject(*object, *holder);
-  Handle<JSObject> stub_holder(IC::GetCodeCacheHolder(
+  InlineCacheHolderFlag cache_holder = IC::GetCodeCacheForObject(*object);
+  Handle<HeapObject> stub_holder(IC::GetCodeCacheHolder(
       isolate_, *object, cache_holder));
 
   // TODO(1233596): We cannot do receiver map check for non-JS objects
@@ -365,7 +369,7 @@
   }
 
   Code::Flags flags = Code::ComputeMonomorphicFlags(
-      kind, extra_state, Code::INTERCEPTOR, argc, cache_holder);
+      kind, extra_state, cache_holder, Code::INTERCEPTOR, argc);
   Handle<Object> probe(stub_holder->map()->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -392,7 +396,7 @@
                                           Handle<PropertyCell> cell,
                                           Handle<JSFunction> function) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(
-      kind, extra_state, Code::NORMAL, argc);
+      kind, extra_state, OWN_MAP, Code::NORMAL, argc);
   Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags),
                        isolate_);
   if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -826,7 +830,7 @@
   // can't use either LoadIC or KeyedLoadIC constructors.
   HandleScope scope(isolate);
   IC ic(IC::NO_EXTRA_FRAME, isolate);
-  ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub());
+  ASSERT(ic.IsLoadStub());
   if (!ic.SlowIsUndeclaredGlobal()) return isolate->heap()->undefined_value();
 
   // Throw a reference error.
@@ -1096,10 +1100,7 @@
   masm_.GetCode(&desc);
   Handle<Code> code = factory()->NewCode(desc, flags, masm_.CodeObject());
 #ifdef ENABLE_DISASSEMBLER
-  if (FLAG_print_code_stubs) {
-    CodeTracer::Scope trace_scope(isolate()->GetCodeTracer());
-    code->Disassemble(name, trace_scope.file());
-  }
+  if (FLAG_print_code_stubs) code->Disassemble(name);
 #endif
   return code;
 }
@@ -1127,55 +1128,93 @@
 
 
 Register LoadStubCompiler::HandlerFrontendHeader(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
     Label* miss) {
-  return CheckPrototypes(object, object_reg, holder,
+  Handle<JSObject> receiver;
+  PrototypeCheckType check_type = CHECK_ALL_MAPS;
+  int function_index = -1;
+  if (object->IsJSObject()) {
+    receiver = Handle<JSObject>::cast(object);
+    check_type = SKIP_RECEIVER;
+  } else {
+    if (object->IsString()) {
+      function_index = Context::STRING_FUNCTION_INDEX;
+    } else if (object->IsSymbol()) {
+      function_index = Context::SYMBOL_FUNCTION_INDEX;
+    } else if (object->IsNumber()) {
+      function_index = Context::NUMBER_FUNCTION_INDEX;
+    } else {
+      ASSERT(object->IsBoolean());
+      // Booleans use the generic oddball map, so an additional check is
+      // needed to ensure the receiver is really a boolean.
+      GenerateBooleanCheck(object_reg, miss);
+      function_index = Context::BOOLEAN_FUNCTION_INDEX;
+    }
+
+    GenerateDirectLoadGlobalFunctionPrototype(
+        masm(), function_index, scratch1(), miss);
+    receiver = handle(JSObject::cast(object->GetPrototype(isolate())));
+    object_reg = scratch1();
+  }
+
+  // Check that the maps starting from the prototype haven't changed.
+  return CheckPrototypes(receiver, object_reg, holder,
                          scratch1(), scratch2(), scratch3(),
-                         name, miss, SKIP_RECEIVER);
+                         name, miss, check_type);
 }
 
 
 // HandlerFrontend for store uses the name register. It has to be restored
 // before a miss.
 Register StoreStubCompiler::HandlerFrontendHeader(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
     Label* miss) {
-  return CheckPrototypes(object, object_reg, holder,
+  return CheckPrototypes(Handle<JSObject>::cast(object), object_reg, holder,
                          this->name(), scratch1(), scratch2(),
                          name, miss, SKIP_RECEIVER);
 }
 
 
-Register BaseLoadStoreStubCompiler::HandlerFrontend(Handle<JSObject> object,
+bool BaseLoadStoreStubCompiler::HasHeapNumberMap(MapHandleList* receiver_maps) {
+  for (int i = 0; i < receiver_maps->length(); ++i) {
+    Handle<Map> map = receiver_maps->at(i);
+    if (map.is_identical_to(isolate()->factory()->heap_number_map())) {
+      return true;
+    }
+  }
+  return false;
+}
+
+
+Register BaseLoadStoreStubCompiler::HandlerFrontend(Handle<Object> object,
                                                     Register object_reg,
                                                     Handle<JSObject> holder,
-                                                    Handle<Name> name,
-                                                    Label* success) {
+                                                    Handle<Name> name) {
   Label miss;
 
   Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
+
   return reg;
 }
 
 
 void LoadStubCompiler::NonexistentHandlerFrontend(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> last,
     Handle<Name> name,
-    Label* success,
     Handle<JSGlobalObject> global) {
   Label miss;
 
-  Register holder =
-      HandlerFrontendHeader(object, receiver(), last, name, &miss);
+  Register holder = HandlerFrontendHeader(
+      object, receiver(), last, name, &miss);
 
   if (!last->HasFastProperties() &&
       !last->IsJSGlobalObject() &&
@@ -1196,12 +1235,12 @@
     GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
   }
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
 }
 
 
 Handle<Code> LoadStubCompiler::CompileLoadField(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name,
     PropertyIndex field,
@@ -1221,13 +1260,11 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadConstant(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<Object> value) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
   GenerateLoadConstant(value);
 
   // Return the generated code.
@@ -1236,15 +1273,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadCallback(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<ExecutableAccessorInfo> callback) {
-  Label success;
-
   Register reg = CallbackHandlerFrontend(
-      object, receiver(), holder, name, &success, callback);
-  __ bind(&success);
+      object, receiver(), holder, name, callback);
   GenerateLoadCallback(reg, callback);
 
   // Return the generated code.
@@ -1253,17 +1287,13 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadCallback(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name,
     const CallOptimization& call_optimization) {
   ASSERT(call_optimization.is_simple_api_call());
-  Label success;
-
   Handle<JSFunction> callback = call_optimization.constant_function();
-  CallbackHandlerFrontend(
-      object, receiver(), holder, name, &success, callback);
-  __ bind(&success);
+  CallbackHandlerFrontend(object, receiver(), holder, name, callback);
   GenerateLoadCallback(call_optimization);
 
   // Return the generated code.
@@ -1272,16 +1302,13 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name) {
-  Label success;
-
   LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
-  Register reg = HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  Register reg = HandlerFrontend(object, receiver(), holder, name);
   // TODO(368): Compile in the whole chain: all the interceptors in
   // prototypes and ultimate answer.
   GenerateLoadInterceptor(reg, object, holder, &lookup, name);
@@ -1296,7 +1323,6 @@
     Handle<JSObject> interceptor_holder,
     Handle<Name> name,
     LookupResult* lookup) {
-  Label success;
   Handle<JSObject> holder(lookup->holder());
   if (lookup->IsField()) {
     PropertyIndex field = lookup->GetFieldIndex();
@@ -1307,8 +1333,7 @@
       // We found FIELD property in prototype chain of interceptor's holder.
       // Retrieve a field from field's holder.
       Register reg = HandlerFrontend(
-          interceptor_holder, interceptor_reg, holder, name, &success);
-      __ bind(&success);
+          interceptor_holder, interceptor_reg, holder, name);
       GenerateLoadField(
           reg, holder, field, lookup->representation());
     }
@@ -1321,8 +1346,7 @@
     ASSERT(callback->getter() != NULL);
 
     Register reg = CallbackHandlerFrontend(
-        interceptor_holder, interceptor_reg, holder, name, &success, callback);
-    __ bind(&success);
+        interceptor_holder, interceptor_reg, holder, name, callback);
     GenerateLoadCallback(reg, callback);
   }
 }
@@ -1342,14 +1366,11 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<JSFunction> getter) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
   GenerateLoadViaGetter(masm(), receiver(), getter);
 
   // Return the generated code.
@@ -1442,10 +1463,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<JSFunction> setter) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
   GenerateStoreViaSetter(masm(), setter);
 
   return GetCode(kind(), Code::CALLBACKS, name);
@@ -1555,7 +1573,7 @@
                                                 Code::StubType type,
                                                 Handle<Name> name) {
   Code::Flags flags = Code::ComputeFlags(
-      Code::HANDLER, MONOMORPHIC, extra_state(), type, kind);
+      Code::HANDLER, MONOMORPHIC, extra_state(), type, kind, cache_holder_);
   Handle<Code> code = GetCodeWithFlags(flags, name);
   PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
   JitEvent(name, code);
@@ -1723,11 +1741,8 @@
 Handle<Code> CallStubCompiler::GetCode(Code::StubType type,
                                        Handle<Name> name) {
   int argc = arguments_.immediate();
-  Code::Flags flags = Code::ComputeMonomorphicFlags(kind_,
-                                                    extra_state_,
-                                                    type,
-                                                    argc,
-                                                    cache_holder_);
+  Code::Flags flags = Code::ComputeMonomorphicFlags(
+      kind_, extra_state_, cache_holder_, type, argc);
   return GetCodeWithFlags(flags, name);
 }
 
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 9a82785..1daffff 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -83,29 +83,21 @@
   Handle<Code> FindIC(Handle<Name> name,
                       Handle<Map> stub_holder_map,
                       Code::Kind kind,
-                      Code::ExtraICState extra_state = Code::kNoExtraICState);
-
-  Handle<Code> FindIC(Handle<Name> name,
-                      Handle<JSObject> stub_holder,
-                      Code::Kind kind,
-                      Code::ExtraICState extra_state = Code::kNoExtraICState);
+                      Code::ExtraICState extra_state = Code::kNoExtraICState,
+                      InlineCacheHolderFlag cache_holder = OWN_MAP);
 
   Handle<Code> FindHandler(Handle<Name> name,
-                           Handle<JSObject> receiver,
+                           Handle<HeapObject> stub_holder,
                            Code::Kind kind,
+                           InlineCacheHolderFlag cache_holder = OWN_MAP,
                            StrictModeFlag strict_mode = kNonStrictMode);
 
-  Handle<Code> ComputeMonomorphicIC(Handle<HeapObject> receiver,
+  Handle<Code> ComputeMonomorphicIC(Handle<Name> name,
+                                    Handle<Object> receiver,
                                     Handle<Code> handler,
-                                    Handle<Name> name,
                                     StrictModeFlag strict_mode);
 
-  // Computes the right stub matching. Inserts the result in the
-  // cache before returning.  This might compile a stub if needed.
-  Handle<Code> ComputeLoadNonexistent(Handle<Name> name,
-                                      Handle<JSObject> object);
-
-  // ---
+  Handle<Code> ComputeLoadNonexistent(Handle<Name> name, Handle<Object> object);
 
   Handle<Code> ComputeKeyedLoadElement(Handle<Map> receiver_map);
 
@@ -501,6 +493,7 @@
                            Label* miss,
                            PrototypeCheckType check = CHECK_ALL_MAPS);
 
+  void GenerateBooleanCheck(Register object, Label* miss);
 
  protected:
   Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
@@ -531,8 +524,10 @@
 
 class BaseLoadStoreStubCompiler: public StubCompiler {
  public:
-  BaseLoadStoreStubCompiler(Isolate* isolate, Code::Kind kind)
-      : StubCompiler(isolate), kind_(kind) {
+  BaseLoadStoreStubCompiler(Isolate* isolate,
+                            Code::Kind kind,
+                            InlineCacheHolderFlag cache_holder = OWN_MAP)
+      : StubCompiler(isolate), kind_(kind), cache_holder_(cache_holder) {
     InitializeRegisters();
   }
   virtual ~BaseLoadStoreStubCompiler() { }
@@ -563,21 +558,18 @@
   }
 
  protected:
-  virtual Register HandlerFrontendHeader(Handle<JSObject> object,
+  virtual Register HandlerFrontendHeader(Handle<Object> object,
                                          Register object_reg,
                                          Handle<JSObject> holder,
                                          Handle<Name> name,
                                          Label* miss) = 0;
 
-  virtual void HandlerFrontendFooter(Handle<Name> name,
-                                     Label* success,
-                                     Label* miss) = 0;
+  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss) = 0;
 
-  Register HandlerFrontend(Handle<JSObject> object,
+  Register HandlerFrontend(Handle<Object> object,
                            Register object_reg,
                            Handle<JSObject> holder,
-                           Handle<Name> name,
-                           Label* success);
+                           Handle<Name> name);
 
   Handle<Code> GetCode(Code::Kind kind,
                        Code::StubType type,
@@ -616,43 +608,48 @@
 
   void InitializeRegisters();
 
+  bool HasHeapNumberMap(MapHandleList* receiver_maps);
+
   Code::Kind kind_;
+  InlineCacheHolderFlag cache_holder_;
   Register* registers_;
 };
 
 
 class LoadStubCompiler: public BaseLoadStoreStubCompiler {
  public:
-  LoadStubCompiler(Isolate* isolate, Code::Kind kind = Code::LOAD_IC)
-      : BaseLoadStoreStubCompiler(isolate, kind) { }
+  LoadStubCompiler(Isolate* isolate,
+                   InlineCacheHolderFlag cache_holder = OWN_MAP,
+                   Code::Kind kind = Code::LOAD_IC)
+      : BaseLoadStoreStubCompiler(isolate, kind, cache_holder) { }
   virtual ~LoadStubCompiler() { }
 
-  Handle<Code> CompileLoadField(Handle<JSObject> object,
+  Handle<Code> CompileLoadField(Handle<Object> object,
                                 Handle<JSObject> holder,
                                 Handle<Name> name,
                                 PropertyIndex index,
                                 Representation representation);
 
-  Handle<Code> CompileLoadCallback(Handle<JSObject> object,
+  Handle<Code> CompileLoadCallback(Handle<Object> object,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    Handle<ExecutableAccessorInfo> callback);
 
-  Handle<Code> CompileLoadCallback(Handle<JSObject> object,
+  Handle<Code> CompileLoadCallback(Handle<Object> object,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    const CallOptimization& call_optimization);
 
-  Handle<Code> CompileLoadConstant(Handle<JSObject> object,
+  Handle<Code> CompileLoadConstant(Handle<Object> object,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    Handle<Object> value);
 
-  Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
+  Handle<Code> CompileLoadInterceptor(Handle<Object> object,
                                       Handle<JSObject> holder,
                                       Handle<Name> name);
 
-  Handle<Code> CompileLoadViaGetter(Handle<JSObject> object,
+  Handle<Code> CompileLoadViaGetter(Handle<Object> object,
                                     Handle<JSObject> holder,
                                     Handle<Name> name,
                                     Handle<JSFunction> getter);
@@ -661,12 +658,12 @@
                                     Register receiver,
                                     Handle<JSFunction> getter);
 
-  Handle<Code> CompileLoadNonexistent(Handle<JSObject> object,
+  Handle<Code> CompileLoadNonexistent(Handle<Object> object,
                                       Handle<JSObject> last,
                                       Handle<Name> name,
                                       Handle<JSGlobalObject> global);
 
-  Handle<Code> CompileLoadGlobal(Handle<JSObject> object,
+  Handle<Code> CompileLoadGlobal(Handle<Object> object,
                                  Handle<GlobalObject> holder,
                                  Handle<PropertyCell> cell,
                                  Handle<Name> name,
@@ -675,26 +672,22 @@
   static Register* registers();
 
  protected:
-  virtual Register HandlerFrontendHeader(Handle<JSObject> object,
+  virtual Register HandlerFrontendHeader(Handle<Object> object,
                                          Register object_reg,
                                          Handle<JSObject> holder,
                                          Handle<Name> name,
                                          Label* miss);
 
-  virtual void HandlerFrontendFooter(Handle<Name> name,
-                                     Label* success,
-                                     Label* miss);
+  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
 
-  Register CallbackHandlerFrontend(Handle<JSObject> object,
+  Register CallbackHandlerFrontend(Handle<Object> object,
                                    Register object_reg,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
-                                   Label* success,
                                    Handle<Object> callback);
-  void NonexistentHandlerFrontend(Handle<JSObject> object,
+  void NonexistentHandlerFrontend(Handle<Object> object,
                                   Handle<JSObject> last,
                                   Handle<Name> name,
-                                  Label* success,
                                   Handle<JSGlobalObject> global);
 
   void GenerateLoadField(Register reg,
@@ -706,7 +699,7 @@
                             Handle<ExecutableAccessorInfo> callback);
   void GenerateLoadCallback(const CallOptimization& call_optimization);
   void GenerateLoadInterceptor(Register holder_reg,
-                               Handle<JSObject> object,
+                               Handle<Object> object,
                                Handle<JSObject> holder,
                                LookupResult* lookup,
                                Handle<Name> name);
@@ -726,8 +719,9 @@
 
 class KeyedLoadStubCompiler: public LoadStubCompiler {
  public:
-  explicit KeyedLoadStubCompiler(Isolate* isolate)
-      : LoadStubCompiler(isolate, Code::KEYED_LOAD_IC) { }
+  KeyedLoadStubCompiler(Isolate* isolate,
+                        InlineCacheHolderFlag cache_holder = OWN_MAP)
+      : LoadStubCompiler(isolate, cache_holder, Code::KEYED_LOAD_IC) { }
 
   Handle<Code> CompileLoadElement(Handle<Map> receiver_map);
 
@@ -827,15 +821,13 @@
   }
 
  protected:
-  virtual Register HandlerFrontendHeader(Handle<JSObject> object,
+  virtual Register HandlerFrontendHeader(Handle<Object> object,
                                          Register object_reg,
                                          Handle<JSObject> holder,
                                          Handle<Name> name,
                                          Label* miss);
 
-  virtual void HandlerFrontendFooter(Handle<Name> name,
-                                     Label* success,
-                                     Label* miss);
+  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
   void GenerateRestoreName(MacroAssembler* masm,
                            Label* label,
                            Handle<Name> name);
@@ -928,8 +920,7 @@
   void CompileHandlerFrontend(Handle<Object> object,
                               Handle<JSObject> holder,
                               Handle<Name> name,
-                              CheckType check,
-                              Label* success);
+                              CheckType check);
 
   void CompileHandlerBackend(Handle<JSFunction> function);
 
diff --git a/src/type-info.cc b/src/type-info.cc
index 65d1364..c548ad3 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -298,11 +298,8 @@
   Code::ExtraICState extra_ic_state =
       CallIC::Contextual::encode(call_kind == CALL_AS_FUNCTION);
 
-  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::CALL_IC,
-                                                    extra_ic_state,
-                                                    Code::NORMAL,
-                                                    arity,
-                                                    OWN_MAP);
+  Code::Flags flags = Code::ComputeMonomorphicFlags(
+      Code::CALL_IC, extra_ic_state, OWN_MAP, Code::NORMAL, arity);
   CollectReceiverTypes(expr->CallFeedbackId(), name, flags, types);
 }
 
diff --git a/src/v8conversions.h b/src/v8conversions.h
index 3a7b524..d3da9f8 100644
--- a/src/v8conversions.h
+++ b/src/v8conversions.h
@@ -60,10 +60,17 @@
                            Object* number) {
   SealHandleScope shs(isolate);
   if (number->IsSmi()) {
-    return Smi::cast(number)->value();
+    int value = Smi::cast(number)->value();
+    CHECK_GE(value, 0);
+    ASSERT(
+      static_cast<unsigned>(Smi::kMaxValue)
+        <= std::numeric_limits<size_t>::max());
+    return static_cast<size_t>(value);
   } else {
     ASSERT(number->IsHeapNumber());
     double value = HeapNumber::cast(number)->value();
+    CHECK(value >= 0 &&
+          value <= std::numeric_limits<size_t>::max());
     return static_cast<size_t>(value);
   }
 }
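
The checks added above guard the conversion from a JS number to size_t: a negative Smi or an out-of-range double would otherwise wrap silently when cast. A minimal standalone sketch of the same guard, using plain C++ assertions instead of V8's CHECK macros (names below are illustrative, not the V8 helpers):

#include <cassert>
#include <cstddef>
#include <limits>

// Sketch only: validate the range before casting, as the patch does with
// CHECK_GE / CHECK, so the cast to size_t can never wrap or truncate sign.
size_t CheckedNumberToSize(double value) {
  assert(value >= 0);
  assert(value <= static_cast<double>(std::numeric_limits<size_t>::max()));
  return static_cast<size_t>(value);
}

int main() {
  return CheckedNumberToSize(42.0) == 42 ? 0 : 1;
}
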
diff --git a/src/version.cc b/src/version.cc
index d11bfcd..5b8b624 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // system so their names cannot be changed without changing the scripts.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     23
-#define BUILD_NUMBER      4
+#define BUILD_NUMBER      5
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 9d95fc4..a3fb077 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -4224,15 +4224,9 @@
     STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
     __ testb(rbx, Immediate(kStringEncodingMask));
-    // Make long jumps when allocations tracking is on due to
-    // RecordObjectAllocation inside MacroAssembler::Allocate.
-    Label::Distance jump_distance =
-        masm->isolate()->heap_profiler()->is_tracking_allocations()
-        ? Label::kFar
-        : Label::kNear;
-    __ j(zero, &two_byte_slice, jump_distance);
+    __ j(zero, &two_byte_slice, Label::kNear);
     __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
-    __ jmp(&set_slice_header, jump_distance);
+    __ jmp(&set_slice_header, Label::kNear);
     __ bind(&two_byte_slice);
     __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
     __ bind(&set_slice_header);
@@ -5593,10 +5587,12 @@
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
 
-    // Save the resulting elements kind in type info
-    __ Integer32ToSmi(rdx, rdx);
-    __ movq(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset), rdx);
-    __ SmiToInteger32(rdx, rdx);
+    // Save the resulting elements kind in type info. We can't just store rdx
+    // in the AllocationSite::transition_info field because the elements kind
+    // is restricted to a portion of the field; the upper bits must be left alone.
+    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+    __ SmiAddConstant(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset),
+                      Smi::FromInt(kFastElementsKindPackedToHoley));
 
     __ bind(&normal_sequence);
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5738,8 +5734,11 @@
          masm->isolate()->factory()->allocation_site_map());
   __ j(not_equal, &no_info);
 
+  // Only look at the lower 16 bits of the transition info.
   __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
   __ SmiToInteger32(rdx, rdx);
+  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+  __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
 
   __ bind(&no_info);
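
The two hunks above treat AllocationSite::transition_info as a packed field: only the low ElementsKindBits portion carries the elements kind, so the stub adds a Smi constant on write and masks with ElementsKindBits::kMask on read rather than overwriting the whole word. A rough standalone illustration of that bit-field discipline follows; the 16-bit mask and field layout are assumptions for the sketch, not V8's actual constants:

#include <cstdint>
#include <iostream>

constexpr uint32_t kElementsKindShift = 0;
constexpr uint32_t kElementsKindMask  = 0xFFFF;  // assumed low 16 bits

// Read only the elements-kind portion of the packed field.
uint32_t GetElementsKind(uint32_t transition_info) {
  return (transition_info >> kElementsKindShift) & kElementsKindMask;
}

// Write the elements-kind portion while preserving the upper bits.
uint32_t SetElementsKind(uint32_t transition_info, uint32_t kind) {
  return (transition_info & ~(kElementsKindMask << kElementsKindShift)) |
         ((kind & kElementsKindMask) << kElementsKindShift);
}

int main() {
  uint32_t info = 0xABCD0003;  // upper bits carry unrelated state
  info = SetElementsKind(info, 5);
  std::cout << std::hex << info << "\n";  // prints abcd0005: upper bits kept
  return GetElementsKind(info) == 5 ? 0 : 1;
}
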
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 721ae1d..fe8734c 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -749,10 +749,10 @@
   __ JumpIfSmi(rdx, &slow_with_tagged_index);
   // Get the map from the receiver.
   __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks.  We need
-  // to do this because this generic stub does not perform map checks.
+  // Check that the receiver does not require access checks and is not observed.
+  // The generic stub does not perform map checks or handle observed objects.
   __ testb(FieldOperand(r9, Map::kBitFieldOffset),
-           Immediate(1 << Map::kIsAccessCheckNeeded));
+           Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
   __ j(not_zero, &slow_with_tagged_index);
   // Check that the key is a smi.
   __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
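
The widened testb above folds two conditions into one branch: the generic keyed-store stub now bails out to the slow path if the receiver's map has either the access-check bit or the observed bit set. The same idea in a standalone sketch, with made-up bit positions for illustration:

#include <cstdint>

constexpr uint8_t kIsAccessCheckNeededBit = 1 << 3;  // assumed positions
constexpr uint8_t kIsObservedBit          = 1 << 6;

// One AND against a combined mask covers both conditions, mirroring the
// single testb instruction in the stub.
bool NeedsGenericSlowPath(uint8_t map_bit_field) {
  return (map_bit_field & (kIsAccessCheckNeededBit | kIsObservedBit)) != 0;
}

int main() {
  return (!NeedsGenericSlowPath(0) &&
          NeedsGenericSlowPath(kIsObservedBit)) ? 0 : 1;
}
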
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 2f7166e..c36f6a6 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -4081,10 +4081,7 @@
                               AllocationFlags flags) {
   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
   ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
-  if (!FLAG_inline_new ||
-      // TODO(mstarzinger): Implement more efficiently by keeping then
-      // bump-pointer allocation area empty instead of recompiling code.
-      isolate()->heap_profiler()->is_tracking_allocations()) {
+  if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
@@ -4164,10 +4161,7 @@
                               Label* gc_required,
                               AllocationFlags flags) {
   ASSERT((flags & SIZE_IN_WORDS) == 0);
-  if (!FLAG_inline_new ||
-      // TODO(mstarzinger): Implement more efficiently by keeping then
-      // bump-pointer allocation area empty instead of recompiling code.
-      isolate()->heap_profiler()->is_tracking_allocations()) {
+  if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 499ccdf..edfb47b 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -1200,34 +1200,33 @@
 }
 
 
-void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                             Label* success,
-                                             Label* miss) {
+void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ jmp(success);
+    Label success;
+    __ jmp(&success);
     __ bind(miss);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
-void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
-                                              Label* success,
-                                              Label* miss) {
+void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
   if (!miss->is_unused()) {
-    __ jmp(success);
+    Label success;
+    __ jmp(&success);
     GenerateRestoreName(masm(), miss, name);
     TailCallBuiltin(masm(), MissBuiltin(kind()));
+    __ bind(&success);
   }
 }
 
 
 Register LoadStubCompiler::CallbackHandlerFrontend(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Register object_reg,
     Handle<JSObject> holder,
     Handle<Name> name,
-    Label* success,
     Handle<Object> callback) {
   Label miss;
 
@@ -1268,7 +1267,7 @@
     __ j(not_equal, &miss);
   }
 
-  HandlerFrontendFooter(name, success, &miss);
+  HandlerFrontendFooter(name, &miss);
   return reg;
 }
 
@@ -1389,7 +1388,7 @@
 
 void LoadStubCompiler::GenerateLoadInterceptor(
     Register holder_reg,
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> interceptor_holder,
     LookupResult* lookup,
     Handle<Name> name) {
@@ -1629,7 +1628,7 @@
   }
 
   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
-  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+  site->SetElementsKind(GetInitialFastElementsKind());
   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
   __ movq(rax, Immediate(argc));
   __ Move(rbx, site_feedback_cell);
@@ -1661,8 +1660,12 @@
   //  -- rsp[(argc + 1) * 8] : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss;
   GenerateNameCheck(name, &miss);
@@ -1912,8 +1915,12 @@
   //  -- rsp[(argc + 1) * 8] : receiver
   // -----------------------------------
 
-  // If object is not an array, bail out to regular call.
-  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
+  // If object is not an array or is observed, bail out to regular call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed()) {
+    return Handle<Code>::null();
+  }
 
   Label miss, return_undefined, call_builtin;
   GenerateNameCheck(name, &miss);
@@ -2507,11 +2514,21 @@
 }
 
 
+void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
+  Label success;
+  // Check that the object is a boolean.
+  __ CompareRoot(object, Heap::kTrueValueRootIndex);
+  __ j(equal, &success);
+  __ CompareRoot(object, Heap::kFalseValueRootIndex);
+  __ j(not_equal, miss);
+  __ bind(&success);
+}
+
+
 void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                               Handle<JSObject> holder,
                                               Handle<Name> name,
-                                              CheckType check,
-                                              Label* success) {
+                                              CheckType check) {
   // ----------- S t a t e -------------
   // rcx                 : function name
   // rsp[0]              : return address
@@ -2593,13 +2610,7 @@
       break;
     }
     case BOOLEAN_CHECK: {
-      Label fast;
-      // Check that the object is a boolean.
-      __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
-      __ j(equal, &fast);
-      __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
-      __ j(not_equal, &miss);
-      __ bind(&fast);
+      GenerateBooleanCheck(rdx, &miss);
       // Check that the maps starting from the prototype haven't changed.
       GenerateDirectLoadGlobalFunctionPrototype(
           masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
@@ -2610,11 +2621,14 @@
     }
   }
 
-  __ jmp(success);
+  Label success;
+  __ jmp(&success);
 
   // Handle call cache miss.
   __ bind(&miss);
   GenerateMissBranch();
+
+  __ bind(&success);
 }
 
 
@@ -2643,10 +2657,7 @@
     if (!code.is_null()) return code;
   }
 
-  Label success;
-
-  CompileHandlerFrontend(object, holder, name, check, &success);
-  __ bind(&success);
+  CompileHandlerFrontend(object, holder, name, check);
   CompileHandlerBackend(function);
 
   // Return the generated code.
@@ -2782,9 +2793,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     Handle<ExecutableAccessorInfo> callback) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   __ PopReturnAddressTo(scratch1());
   __ push(receiver());
@@ -2808,9 +2817,7 @@
     Handle<JSObject> holder,
     Handle<Name> name,
     const CallOptimization& call_optimization) {
-  Label success;
-  HandlerFrontend(object, receiver(), holder, name, &success);
-  __ bind(&success);
+  HandlerFrontend(object, receiver(), holder, name);
 
   Register values[] = { value() };
   GenerateFastApiCall(
@@ -2924,15 +2931,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<JSObject> last,
     Handle<Name> name,
     Handle<JSGlobalObject> global) {
-  Label success;
+  NonexistentHandlerFrontend(object, last, name, global);
 
-  NonexistentHandlerFrontend(object, last, name, &success, global);
-
-  __ bind(&success);
   // Return undefined if maps of the full prototype chain are still the
   // same and no global property with this name contains a value.
   __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
@@ -3027,12 +3031,12 @@
 
 
 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
-    Handle<JSObject> object,
+    Handle<Object> object,
     Handle<GlobalObject> global,
     Handle<PropertyCell> cell,
     Handle<Name> name,
     bool is_dont_delete) {
-  Label success, miss;
+  Label miss;
   // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
   // rax is used as receiver(), which we would otherwise clobber before a
   // potential miss.
@@ -3051,8 +3055,7 @@
     __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
   }
 
-  HandlerFrontendFooter(name, &success, &miss);
-  __ bind(&success);
+  HandlerFrontendFooter(name, &miss);
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->named_load_global_stub(), 1);
@@ -3076,17 +3079,25 @@
     GenerateNameCheck(name, this->name(), &miss);
   }
 
-  __ JumpIfSmi(receiver(), &miss);
+  Label number_case;
+  Label* smi_target = HasHeapNumberMap(receiver_maps) ? &number_case : &miss;
+  __ JumpIfSmi(receiver(), smi_target);
+
   Register map_reg = scratch1();
   __ movq(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
   int receiver_count = receiver_maps->length();
   int number_of_handled_maps = 0;
+  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   for (int current = 0; current < receiver_count; ++current) {
     Handle<Map> map = receiver_maps->at(current);
     if (!map->is_deprecated()) {
       number_of_handled_maps++;
       // Check map and tail call if there's a match
       __ Cmp(map_reg, receiver_maps->at(current));
+      if (map.is_identical_to(heap_number_map)) {
+        ASSERT(!number_case.is_unused());
+        __ bind(&number_case);
+      }
       __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
     }
   }
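
The hunk above changes polymorphic load dispatch so that a Smi receiver is no longer an automatic miss: when one of the cached receiver maps is the heap-number map, the Smi jumps to that handler, since Smis and heap numbers share number semantics. A simplified standalone model of the selection logic; the types below are placeholders, not V8's Map or Code handles:

#include <vector>

enum class MapKind { kHeapNumber, kString, kJSObject };

// Pick the handler index for a receiver, routing Smi receivers to the
// heap-number handler when one is cached instead of treating them as a miss.
int SelectHandler(bool receiver_is_smi, MapKind receiver_map,
                  const std::vector<MapKind>& cached_maps) {
  for (int i = 0; i < static_cast<int>(cached_maps.size()); ++i) {
    if (receiver_is_smi && cached_maps[i] == MapKind::kHeapNumber) return i;
    if (!receiver_is_smi && cached_maps[i] == receiver_map) return i;
  }
  return -1;  // miss: fall back to the generic IC
}

int main() {
  std::vector<MapKind> maps = { MapKind::kString, MapKind::kHeapNumber };
  return SelectHandler(true, MapKind::kJSObject, maps) == 1 ? 0 : 1;
}
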
diff --git a/test/cctest/cctest.h b/test/cctest/cctest.h
index 7f84c25..428245d 100644
--- a/test/cctest/cctest.h
+++ b/test/cctest/cctest.h
@@ -332,6 +332,7 @@
 static inline void SimulateFullSpace(v8::internal::NewSpace* space) {
   int new_linear_size = static_cast<int>(
       *space->allocation_limit_address() - *space->allocation_top_address());
+  if (new_linear_size == 0) return;
   v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
   v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
   node->set_size(space->heap(), new_linear_size);
@@ -340,9 +341,7 @@
 
 // Helper function that simulates a full old-space in the heap.
 static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
-  int old_linear_size = static_cast<int>(space->limit() - space->top());
-  space->Free(space->top(), old_linear_size);
-  space->SetTop(space->limit(), space->limit());
+  space->EmptyAllocationInfo();
   space->ResetFreeList();
   space->ClearStats();
 }
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index 58550d8..ad7ee91 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -67,6 +67,9 @@
 
   # BUG(2999).
   'test-cpu-profiler/CollectCpuProfile': [PASS, FAIL],
+
+  # BUG(3005).
+  'test-alloc/CodeRange': [PASS, FLAKY],
 }],  # 'system == windows'
 
 ##############################################################################
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 15c7263..6a75cd7 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -17131,6 +17131,14 @@
   CHECK_LT(final_size, initial_size + 1);
 }
 
+
+TEST(Regress2333) {
+  LocalContext env;
+  for (int i = 0; i < 3; i++) {
+    CcTest::heap()->PerformScavenge();
+  }
+}
+
 static uint32_t* stack_limit;
 
 static void GetStackLimitCallback(
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index 12638d2..e5d2c6b 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -31,6 +31,7 @@
 
 #include "v8.h"
 
+#include "allocation-tracker.h"
 #include "cctest.h"
 #include "hashmap.h"
 #include "heap-profiler.h"
@@ -39,6 +40,12 @@
 #include "utils-inl.h"
 #include "../include/v8-profiler.h"
 
+using i::AllocationTraceNode;
+using i::AllocationTraceTree;
+using i::AllocationTracker;
+using i::HashMap;
+using i::Vector;
+
 namespace {
 
 class NamedEntriesDetector {
@@ -2125,3 +2132,101 @@
   node = GetNodeByPath(snapshot, builtin_path, ARRAY_SIZE(builtin_path));
   CHECK_NE(NULL, node);
 }
+
+
+static const char* record_trace_tree_source =
+"var topFunctions = [];\n"
+"var global = this;\n"
+"function generateFunctions(width, depth) {\n"
+"  var script = [];\n"
+"  for (var i = 0; i < width; i++) {\n"
+"    for (var j = 0; j < depth; j++) {\n"
+"      script.push('function f_' + i + '_' + j + '(x) {\\n');\n"
+"      script.push('  try {\\n');\n"
+"      if (j < depth-2) {\n"
+"        script.push('    return f_' + i + '_' + (j+1) + '(x+1);\\n');\n"
+"      } else if (j == depth - 2) {\n"
+"        script.push('    return new f_' + i + '_' + (depth - 1) + '();\\n');\n"
+"      } else if (j == depth - 1) {\n"
+"        script.push('    this.ts = Date.now();\\n');\n"
+"      }\n"
+"      script.push('  } catch (e) {}\\n');\n"
+"      script.push('}\\n');\n"
+"      \n"
+"    }\n"
+"  }\n"
+"  var script = script.join('');\n"
+"  // throw script;\n"
+"  global.eval(script);\n"
+"  for (var i = 0; i < width; i++) {\n"
+"    topFunctions.push(this['f_' + i + '_0']);\n"
+"  }\n"
+"}\n"
+"\n"
+"var width = 3;\n"
+"var depth = 3;\n"
+"generateFunctions(width, depth);\n"
+"var instances = [];\n"
+"function start() {\n"
+"  for (var i = 0; i < width; i++) {\n"
+"    instances.push(topFunctions[i](0));\n"
+"  }\n"
+"}\n"
+"\n"
+"for (var i = 0; i < 100; i++) start();\n";
+
+
+static i::HeapSnapshot* ToInternal(const v8::HeapSnapshot* snapshot) {
+  return const_cast<i::HeapSnapshot*>(
+      reinterpret_cast<const i::HeapSnapshot*>(snapshot));
+}
+
+
+static AllocationTraceNode* FindNode(
+    AllocationTracker* tracker, const Vector<const char*>& names) {
+  AllocationTraceNode* node = tracker->trace_tree()->root();
+  for (int i = 0; node != NULL && i < names.length(); i++) {
+    const char* name = names[i];
+    Vector<AllocationTraceNode*> children = node->children();
+    node = NULL;
+    for (int j = 0; j < children.length(); j++) {
+      v8::SnapshotObjectId id = children[j]->function_id();
+      AllocationTracker::FunctionInfo* info = tracker->GetFunctionInfo(id);
+      if (info && strcmp(info->name, name) == 0) {
+        node = children[j];
+        break;
+      }
+    }
+  }
+  return node;
+}
+
+
+TEST(TrackHeapAllocations) {
+  v8::HandleScope scope(v8::Isolate::GetCurrent());
+  LocalContext env;
+
+  v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
+  heap_profiler->StartRecordingHeapAllocations();
+
+  CompileRun(record_trace_tree_source);
+
+  const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot(
+      v8::String::New("Test"));
+  i::HeapSnapshotsCollection* collection = ToInternal(snapshot)->collection();
+  AllocationTracker* tracker = collection->allocation_tracker();
+  CHECK_NE(NULL, tracker);
+  // Resolve all function locations.
+  tracker->PrepareForSerialization();
+  // Print for better diagnostics in case of failure.
+  tracker->trace_tree()->Print(tracker);
+
+  const char* names[] =
+      { "(anonymous function)", "start", "f_0_0", "f_0_1", "f_0_2" };
+  AllocationTraceNode* node =
+      FindNode(tracker, Vector<const char*>(names, ARRAY_SIZE(names)));
+  CHECK_NE(NULL, node);
+  CHECK_GE(node->allocation_count(), 100);
+  CHECK_GE(node->allocation_size(), 4 * node->allocation_count());
+  heap_profiler->StopRecordingHeapAllocations();
+}
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 3426cdb..799146e 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -760,7 +760,7 @@
   CHECK(array->HasFastSmiOrObjectElements());
 
   // array[length] = name.
-  array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
+  JSReceiver::SetElement(array, 0, name, NONE, kNonStrictMode);
   CHECK_EQ(Smi::FromInt(1), array->length());
   CHECK_EQ(array->GetElement(isolate, 0), *name);
 
@@ -775,7 +775,7 @@
   CHECK(array->HasDictionaryElements());  // Must be in slow mode.
 
   // array[length] = name.
-  array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
+  JSReceiver::SetElement(array, int_length, name, NONE, kNonStrictMode);
   uint32_t new_int_length = 0;
   CHECK(array->length()->ToArrayIndex(&new_int_length));
   CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
@@ -805,8 +805,8 @@
   JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
   JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);
 
-  obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
-  obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
+  JSReceiver::SetElement(obj, 0, first, NONE, kNonStrictMode);
+  JSReceiver::SetElement(obj, 1, second, NONE, kNonStrictMode);
 
   // Make the clone.
   Handle<JSObject> clone = JSObject::Copy(obj);
@@ -822,8 +822,8 @@
   JSReceiver::SetProperty(clone, first, two, NONE, kNonStrictMode);
   JSReceiver::SetProperty(clone, second, one, NONE, kNonStrictMode);
 
-  clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
-  clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
+  JSReceiver::SetElement(clone, 0, second, NONE, kNonStrictMode);
+  JSReceiver::SetElement(clone, 1, first, NONE, kNonStrictMode);
 
   CHECK_EQ(obj->GetElement(isolate, 1), clone->GetElement(isolate, 0));
   CHECK_EQ(obj->GetElement(isolate, 0), clone->GetElement(isolate, 1));
@@ -3516,3 +3516,36 @@
   marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
   ASSERT(marking->IsComplete());
 }
+
+
+TEST(DisableInlineAllocation) {
+  i::FLAG_allow_natives_syntax = true;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  CompileRun("function test() {"
+             "  var x = [];"
+             "  for (var i = 0; i < 10; i++) {"
+             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
+             "  }"
+             "}"
+             "function run() {"
+             "  %OptimizeFunctionOnNextCall(test);"
+             "  test();"
+             "  %DeoptimizeFunction(test);"
+             "}");
+
+  // Warm-up with inline allocation enabled.
+  CompileRun("test(); test(); run();");
+
+  // Run test with inline allocation disabled.
+  CcTest::heap()->DisableInlineAllocation();
+  CompileRun("run()");
+
+  // Run test with inline allocation disabled and pretenuring.
+  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
+  CompileRun("run()");
+
+  // Run test with inline allocation re-enabled.
+  CcTest::heap()->EnableInlineAllocation();
+  CompileRun("run()");
+}
diff --git a/test/mjsunit/array-constructor-feedback.js b/test/mjsunit/array-constructor-feedback.js
index 72ff12c..bfe50d2 100644
--- a/test/mjsunit/array-constructor-feedback.js
+++ b/test/mjsunit/array-constructor-feedback.js
@@ -138,8 +138,8 @@
   })();
 
 
-  // Test: Ensure that bailouts from the stub don't deopt a crankshafted
-  // method with a call to that stub.
+  // Test: Ensure that inlined array calls in crankshaft learn from deopts
+  // based on the move to a dictionary for the array.
   (function() {
     function bar(len) {
       return new Array(len);
@@ -152,10 +152,16 @@
     a = bar(10);
     assertKind(elements_kind.fast, a);
     assertOptimized(bar);
-    // The stub bails out, but the method call should be fine.
+    // bar should deopt because the length is too large.
     a = bar(100000);
-    assertOptimized(bar);
+    assertUnoptimized(bar);
     assertKind(elements_kind.dictionary, a);
+    // The allocation site now has feedback that means the array constructor
+    // will not be inlined.
+    %OptimizeFunctionOnNextCall(bar);
+    a = bar(100000);
+    assertKind(elements_kind.dictionary, a);
+    assertOptimized(bar);
 
     // If the argument isn't a smi, it bails out as well
     a = bar("oops");
@@ -172,8 +178,12 @@
     barn(1, 2, 3);
     assertOptimized(barn);
     a = barn(1, "oops", 3);
-    // The stub should bail out but the method should remain optimized.
+    // The method should deopt, but learn from the failure to avoid inlining
+    // the array.
     assertKind(elements_kind.fast, a);
+    assertUnoptimized(barn);
+    %OptimizeFunctionOnNextCall(barn);
+    a = barn(1, "oops", 3);
     assertOptimized(barn);
   })();
 
@@ -219,4 +229,29 @@
     assertFalse(Realm.eval(contextB, "bar2();") instanceof Array);
     assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
   })();
+
+  // Test: create array with packed feedback, then optimize/inline
+  // function. Verify that if we ask for a holey array then we deopt.
+  // Reoptimization will proceed with the correct feedback and we
+  // won't deopt anymore.
+  (function() {
+    function bar(len) { return new Array(len); }
+    bar(0);
+    bar(0);
+    %OptimizeFunctionOnNextCall(bar);
+    a = bar(0);
+    assertOptimized(bar);
+    assertFalse(isHoley(a));
+    a = bar(1);  // ouch!
+    assertUnoptimized(bar);
+    assertTrue(isHoley(a));
+    // Try again
+    %OptimizeFunctionOnNextCall(bar);
+    a = bar(100);
+    assertOptimized(bar);
+    assertTrue(isHoley(a));
+    a = bar(0);
+    assertOptimized(bar);
+    assertTrue(isHoley(a));
+  })();
 }
diff --git a/test/mjsunit/harmony/object-observe.js b/test/mjsunit/harmony/object-observe.js
index 39bf6a5..72a9cad 100644
--- a/test/mjsunit/harmony/object-observe.js
+++ b/test/mjsunit/harmony/object-observe.js
@@ -614,6 +614,69 @@
   }
 ]);
 
+// ArrayPush cached stub
+reset();
+
+function pushMultiple(arr) {
+  arr.push('a');
+  arr.push('b');
+  arr.push('c');
+}
+
+for (var i = 0; i < 5; i++) {
+  var arr = [];
+  pushMultiple(arr);
+}
+
+for (var i = 0; i < 5; i++) {
+  reset();
+  var arr = [];
+  Object.observe(arr, observer.callback);
+  pushMultiple(arr);
+  Object.unobserve(arr, observer.callback);
+  Object.deliverChangeRecords(observer.callback);
+  observer.assertCallbackRecords([
+    { object: arr, type: 'add', name: '0' },
+    { object: arr, type: 'update', name: 'length', oldValue: 0 },
+    { object: arr, type: 'add', name: '1' },
+    { object: arr, type: 'update', name: 'length', oldValue: 1 },
+    { object: arr, type: 'add', name: '2' },
+    { object: arr, type: 'update', name: 'length', oldValue: 2 },
+  ]);
+}
+
+
+// ArrayPop cached stub
+reset();
+
+function popMultiple(arr) {
+  arr.pop();
+  arr.pop();
+  arr.pop();
+}
+
+for (var i = 0; i < 5; i++) {
+  var arr = ['a', 'b', 'c'];
+  popMultiple(arr);
+}
+
+for (var i = 0; i < 5; i++) {
+  reset();
+  var arr = ['a', 'b', 'c'];
+  Object.observe(arr, observer.callback);
+  popMultiple(arr);
+  Object.unobserve(arr, observer.callback);
+  Object.deliverChangeRecords(observer.callback);
+  observer.assertCallbackRecords([
+    { object: arr, type: 'delete', name: '2', oldValue: 'c' },
+    { object: arr, type: 'update', name: 'length', oldValue: 3 },
+    { object: arr, type: 'delete', name: '1', oldValue: 'b' },
+    { object: arr, type: 'update', name: 'length', oldValue: 2 },
+    { object: arr, type: 'delete', name: '0', oldValue: 'a' },
+    { object: arr, type: 'update', name: 'length', oldValue: 1 },
+  ]);
+}
+
 
 reset();
 function RecursiveThingy() {}
diff --git a/test/mjsunit/load-callback-from-value-classic.js b/test/mjsunit/load-callback-from-value-classic.js
new file mode 100644
index 0000000..0030c61
--- /dev/null
+++ b/test/mjsunit/load-callback-from-value-classic.js
@@ -0,0 +1,38 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Object.defineProperty(Boolean.prototype, "v",
+                      {get:function() { return this; }});
+
+function f(b) {
+  return b.v;
+}
+
+assertEquals("object", typeof f(true));
+assertEquals("object", typeof f(true));
+assertEquals("object", typeof f(true));
+assertEquals("object", typeof f(true));
diff --git a/test/mjsunit/regress/regress-crbug-318671.js b/test/mjsunit/regress/regress-crbug-318671.js
new file mode 100644
index 0000000..54a7d5e
--- /dev/null
+++ b/test/mjsunit/regress/regress-crbug-318671.js
@@ -0,0 +1,38 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function add(x, y) { return x + y; }
+
+print(add({ a: 1 }, "a"));
+print(add({ b: 1 }, "b"));
+print(add({ c: 1 }, "c"));
+
+%OptimizeFunctionOnNextCall(add);
+
+print(add("a", 1));
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 5baa245..f7e94a7 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -217,6 +217,8 @@
         '../../src/allocation.h',
         '../../src/allocation-site-scopes.cc',
         '../../src/allocation-site-scopes.h',
+        '../../src/allocation-tracker.cc',
+        '../../src/allocation-tracker.h',
         '../../src/api.cc',
         '../../src/api.h',
         '../../src/apiutils.h',
diff --git a/tools/push-to-trunk/push_to_trunk.py b/tools/push-to-trunk/push_to_trunk.py
index 9f72cd2..669ba52 100755
--- a/tools/push-to-trunk/push_to_trunk.py
+++ b/tools/push-to-trunk/push_to_trunk.py
@@ -224,7 +224,7 @@
     TextToFile(GetLastChangeLogEntries(self.Config(CHANGELOG_FILE)),
                self.Config(CHANGELOG_ENTRY_FILE))
 
-    if self.Git("cl dcommit -v", "PRESUBMIT_TREE_CHECK=\"skip\"") is None:
+    if self.Git("cl dcommit -f", "PRESUBMIT_TREE_CHECK=\"skip\"") is None:
       self.Die("'git cl dcommit' failed, please try again.")
 
 
diff --git a/tools/push-to-trunk/test_scripts.py b/tools/push-to-trunk/test_scripts.py
index 0975b0d..727e93b 100644
--- a/tools/push-to-trunk/test_scripts.py
+++ b/tools/push-to-trunk/test_scripts.py
@@ -384,7 +384,7 @@
        " 2 files changed\n",
         CheckPreparePush],
       ["cl upload -r \"reviewer@chromium.org\" --send-mail", "done\n"],
-      ["cl dcommit -v", "Closing issue\n"],
+      ["cl dcommit -f", "Closing issue\n"],
       ["svn fetch", "fetch result\n"],
       ["checkout svn/bleeding_edge", ""],
       [("log -1 --format=%H --grep=\"Prepare push to trunk.  "