Version 3.23.16

Prevent Array builtins from changing frozen objects and from changing the structure of sealed objects (Chromium issue 299979).

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@18178 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
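
For illustration, a minimal sketch of the user-visible behavior the
src/array.js hunks below enforce (variable names are hypothetical):

    // A sealed array keeps its structure: the Array builtins may no
    // longer add or remove elements, so they throw instead.
    var sealed = Object.seal([1, 2, 3]);
    sealed.push(4);   // throws TypeError ("array_functions_change_sealed")
    sealed.pop();     // throws TypeError ("array_functions_change_sealed")

    // A frozen array is also sealed, so the same checks apply, and
    // element-changing operations throw as well.
    var frozen = Object.freeze([1, 2, 3]);
    frozen.splice(0, 1, "x");  // throws TypeError ("array_functions_on_frozen")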
diff --git a/ChangeLog b/ChangeLog
index 36d1d4d..7759b08 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,11 @@
+2013-12-02: Version 3.23.16
+
+        Prevent Array builtins from changing frozen objects and from changing
+        the structure of sealed objects (Chromium issue 299979).
+
+        Performance and stability improvements on all platforms.
+
+
 2013-11-29: Version 3.23.15
 
         Fix context register allocation in LTransitionElementsKind
diff --git a/Makefile b/Makefile
index 9000d39..2ff2cdb 100644
--- a/Makefile
+++ b/Makefile
@@ -355,8 +355,8 @@
 FASTTESTMODES = ia32.release,x64.release,ia32.debug,x64.debug,arm.debug
 
 quickcheck:
-	@$(MAKE) all optdebug=on; \
-	tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
+	@$(MAKE) all optdebug=on
+	@tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
 	    --arch-and-mode=$(FASTTESTMODES) $(FASTTESTFLAGS) $(TESTFLAGS)
 qc: quickcheck
 
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 09eca72..d63bc65 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -693,27 +693,12 @@
 }
 
 
-bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
-  // These variants are compiled ahead of time.  See next method.
-  if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
-    return true;
-  }
-  if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {
-    return true;
-  }
-  // Other register combinations are generated as and when they are needed,
-  // so it is unsafe to call them from stubs (we can't generate a stub while
-  // we are generating a stub).
-  return false;
-}
-
-
 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
   WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
-  stub1.GetCode(isolate)->set_is_pregenerated(true);
-  stub2.GetCode(isolate)->set_is_pregenerated(true);
+  stub1.GetCode(isolate);
+  stub2.GetCode(isolate);
 }
 
 
@@ -1674,18 +1659,11 @@
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-          result_size_ == 1;
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -1707,16 +1685,13 @@
   if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) {
       store_buffer_overflow_code = *stub.GetCode(isolate);
   }
-  save_doubles_code->set_is_pregenerated(true);
-  store_buffer_overflow_code->set_is_pregenerated(true);
   isolate->set_fp_stubs_generated(true);
 }
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  Handle<Code> code = stub.GetCode(isolate);
-  code->set_is_pregenerated(true);
+  stub.GetCode(isolate);
 }
 
 
@@ -5347,89 +5322,13 @@
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(r6), REG(r4), REG(r3), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
-  // Also used in KeyedStoreIC::GenerateGeneric.
-  { REG(r3), REG(r4), REG(r5), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(r1), REG(r2), REG(r3), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r2), REG(r1), EMIT_REMEMBERED_SET },
-  // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(r2), REG(r1), REG(r3), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r1), REG(r2), EMIT_REMEMBERED_SET },
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(r3), REG(r2), REG(r4), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r3), REG(r4), EMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r2), REG(r3), REG(r9), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r3), REG(r9), OMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r6), REG(r2), REG(r0), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r6), REG(r9), EMIT_REMEMBERED_SET },
-  // StoreArrayLiteralElementStub::Generate
-  { REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
-  // FastNewClosureStub::Generate
-  { REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
-  // StringAddStub::Generate
-  { REG(r3), REG(r1), REG(r4), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r0), REG(r4), EMIT_REMEMBERED_SET },
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
-  stub1.GetCode(isolate)->set_is_pregenerated(true);
+  stub1.GetCode(isolate);
   // Hydrogen code stubs need stub2 at snapshot time.
   StoreBufferOverflowStub stub2(kSaveFPRegs);
-  stub2.GetCode(isolate)->set_is_pregenerated(true);
-}
-
-
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
+  stub2.GetCode(isolate);
 }
 
 
@@ -5738,7 +5637,6 @@
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
     PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
-    AllowStubCallsScope allow_stub_calls(masm, true);
     ProfileEntryHookStub stub;
     __ push(lr);
     __ CallStub(&stub);
@@ -5931,12 +5829,12 @@
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
+    stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
         (!FLAG_track_allocation_sites &&
          (kind == initial_kind || kind == initial_holey_kind))) {
       T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
-      stub1.GetCode(isolate)->set_is_pregenerated(true);
+      stub1.GetCode(isolate);
     }
   }
 }
@@ -5958,11 +5856,11 @@
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
     InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
-    stubh1.GetCode(isolate)->set_is_pregenerated(true);
+    stubh1.GetCode(isolate);
     InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
-    stubh2.GetCode(isolate)->set_is_pregenerated(true);
+    stubh2.GetCode(isolate);
     InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
-    stubh3.GetCode(isolate)->set_is_pregenerated(true);
+    stubh3.GetCode(isolate);
   }
 }
 
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index c03d8f2..e400686 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -68,7 +68,6 @@
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -231,7 +230,6 @@
         the_heap_number_(the_heap_number),
         scratch_(scratch) { }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
 
  private:
@@ -279,8 +277,6 @@
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 7c1f65f..7b605f7 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -2016,7 +2016,7 @@
       LOperand* value = UseRegisterAtStart(val);
       if (val->CheckFlag(HInstruction::kUint32)) {
         LNumberTagU* result = new(zone()) LNumberTagU(value);
-        return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
       } else if (val->HasRange() && val->range()->IsInSmiRange()) {
         return DefineAsRegister(new(zone()) LSmiTag(value));
       } else {
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 5aebada..4e1cc2f 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -4762,14 +4762,13 @@
     LNumberTagU* instr_;
   };
 
-  LOperand* input = instr->value();
-  ASSERT(input->IsRegister() && input->Equals(instr->result()));
-  Register reg = ToRegister(input);
+  Register input = ToRegister(instr->value());
+  Register result = ToRegister(instr->result());
 
   DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
-  __ cmp(reg, Operand(Smi::kMaxValue));
+  __ cmp(input, Operand(Smi::kMaxValue));
   __ b(hi, deferred->entry());
-  __ SmiTag(reg, reg);
+  __ SmiTag(result, input);
   __ bind(deferred->exit());
 }
 
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 66894dd..5f6076b 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -44,7 +44,6 @@
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
@@ -1229,7 +1228,7 @@
 }
 
 
-void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+void MacroAssembler::InvokeFunction(Register function,
                                     const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
@@ -1238,8 +1237,10 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
+  // Contract with called JS functions requires that function is passed in r1.
+  ASSERT(function.is(r1));
+
   // Get the function and setup the context.
-  Move(r1, function);
   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
 
   // We call indirectly through the code field in the function to
@@ -1250,6 +1251,17 @@
 }
 
 
+void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+                                    const ParameterCount& expected,
+                                    const ParameterCount& actual,
+                                    InvokeFlag flag,
+                                    const CallWrapper& call_wrapper,
+                                    CallKind call_kind) {
+  Move(r1, function);
+  InvokeFunction(r1, expected, actual, flag, call_wrapper, call_kind);
+}
+
+
 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                           Register map,
                                           Register scratch,
@@ -2276,8 +2288,6 @@
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond);
 }
 
@@ -2422,8 +2432,7 @@
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index d223b57..f71c1a3 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -613,6 +613,13 @@
                       const CallWrapper& call_wrapper,
                       CallKind call_kind);
 
+  void InvokeFunction(Register function,
+                      const ParameterCount& expected,
+                      const ParameterCount& actual,
+                      InvokeFlag flag,
+                      const CallWrapper& call_wrapper,
+                      CallKind call_kind);
+
   void InvokeFunction(Handle<JSFunction> function,
                       const ParameterCount& expected,
                       const ParameterCount& actual,
@@ -1191,8 +1198,6 @@
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1471,7 +1476,6 @@
   MemOperand SafepointRegistersAndDoublesSlot(Register reg);
 
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index b93d67b..60c28da 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -742,36 +742,6 @@
 }
 
 
-static void GenerateCallFunction(MacroAssembler* masm,
-                                 Handle<Object> object,
-                                 const ParameterCount& arguments,
-                                 Label* miss,
-                                 ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  //  -- r0: receiver
-  //  -- r1: function to call
-  // -----------------------------------
-
-  // Check that the function really is a function.
-  __ JumpIfSmi(r1, miss);
-  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
-  __ b(ne, miss);
-
-  if (object->IsGlobalObject()) {
-    const int argc = arguments.immediate();
-    const int receiver_offset = argc * kPointerSize;
-    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
-    __ str(r3, MemOperand(sp, receiver_offset));
-  }
-
-  // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
-
-
 static void PushInterceptorArguments(MacroAssembler* masm,
                                      Register receiver,
                                      Register holder,
@@ -955,7 +925,7 @@
 
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
-  CallInterceptorCompiler(StubCompiler* stub_compiler,
+  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                           const ParameterCount& arguments,
                           Register name,
                           ExtraICState extra_ic_state)
@@ -1067,13 +1037,8 @@
       GenerateFastApiDirectCall(
           masm, optimization, arguments_.immediate(), false);
     } else {
-      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
-          ? CALL_AS_FUNCTION
-          : CALL_AS_METHOD;
       Handle<JSFunction> function = optimization.constant_function();
-      ParameterCount expected(function);
-      __ InvokeFunction(function, expected, arguments_,
-                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
+      stub_compiler_->GenerateJumpFunctionIgnoreReceiver(function);
     }
 
     // Deferred code for fast API call case---clean preallocated space.
@@ -1139,7 +1104,7 @@
     __ b(ne, interceptor_succeeded);
   }
 
-  StubCompiler* stub_compiler_;
+  CallStubCompiler* stub_compiler_;
   const ParameterCount& arguments_;
   Register name_;
   ExtraICState extra_ic_state_;
@@ -1541,6 +1506,15 @@
 }
 
 
+void CallStubCompiler::GenerateFunctionCheck(Register function,
+                                             Register scratch,
+                                             Label* miss) {
+  __ JumpIfSmi(function, miss);
+  __ CompareObjectType(function, scratch, scratch, JS_FUNCTION_TYPE);
+  __ b(ne, miss);
+}
+
+
 void CallStubCompiler::GenerateLoadFunctionFromCell(
     Handle<Cell> cell,
     Handle<JSFunction> function,
@@ -1556,9 +1530,7 @@
     // the nice side effect that multiple closures based on the same
     // function can all use this call IC. Before we load through the
     // function, we have to verify that it still is a function.
-    __ JumpIfSmi(r1, miss);
-    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
-    __ b(ne, miss);
+    GenerateFunctionCheck(r1, r3, miss);
 
     // Check the shared function info. Make sure it hasn't changed.
     __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
@@ -1590,8 +1562,7 @@
       object, holder, name, RECEIVER_MAP_CHECK, &miss);
   GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder),
                            index.translate(holder), Representation::Tagged());
-
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
+  GenerateJumpFunction(object, r1, &miss);
 
   HandlerFrontendFooter(&miss);
 
@@ -1640,10 +1611,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If object is not an array, is observed, or is not extensible, bail out
+  // to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -1883,10 +1856,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If object is not an array, is observed, or is not extensible, bail out
+  // to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -2006,8 +1981,7 @@
   __ bind(&miss);
   // Restore function name in r2.
   __ Move(r2, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2073,8 +2047,7 @@
   __ bind(&miss);
   // Restore function name in r2.
   __ Move(r2, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2121,12 +2094,10 @@
   StubRuntimeCallHelper call_helper;
   generator.GenerateSlow(masm(), call_helper);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2214,11 +2185,9 @@
   __ Ret();
 
   __ bind(&slow);
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2296,12 +2265,10 @@
   __ Drop(argc + 1);
   __ Ret();
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2354,8 +2321,7 @@
   __ bind(&miss);
   FreeSpaceForFastApiCall(masm());
 
-  __ bind(&miss_before_stack_reserved);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss_before_stack_reserved);
 
   // Return the generated code.
   return GetCode(function);
@@ -2472,39 +2438,17 @@
 }
 
 
-void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
-
-
-Handle<Code> CallStubCompiler::CompileCallConstant(
-    Handle<Object> object,
-    Handle<JSObject> holder,
-    Handle<Name> name,
-    CheckType check,
-    Handle<JSFunction> function) {
-  if (HasCustomCallGenerator(function)) {
-    Handle<Code> code = CompileCustomCall(object, holder,
-                                          Handle<Cell>::null(),
-                                          function, Handle<String>::cast(name),
-                                          Code::FAST);
-    // A null handle means bail out to the regular compiler code below.
-    if (!code.is_null()) return code;
-  }
-
-  Label miss;
-  HandlerFrontendHeader(object, holder, name, check, &miss);
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Register function,
+                                            Label* miss) {
+  ASSERT(function.is(r1));
+  // Check that the function really is a function.
+  GenerateFunctionCheck(function, r3, miss);
   PatchGlobalProxy(object);
-  CompileHandlerBackend(function);
-  HandlerFrontendFooter(&miss);
 
-  // Return the generated code.
-  return GetCode(function);
+  // Invoke the function.
+  __ InvokeFunction(r1, arguments(), JUMP_FUNCTION,
+                    NullCallWrapper(), call_kind());
 }
 
 
@@ -2531,11 +2475,9 @@
   // Restore receiver.
   __ ldr(r0, MemOperand(sp, argc * kPointerSize));
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
+  GenerateJumpFunction(object, r1, &miss);
 
-  // Handle call cache miss.
-  __ bind(&miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss);
 
   // Return the generated code.
   return GetCode(Code::FAST, name);
@@ -2558,26 +2500,13 @@
 
   Label miss;
   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+  // Potentially loads a closure that matches the shared function info of the
+  // function, rather than the function itself.
   GenerateLoadFunctionFromCell(cell, function, &miss);
-  PatchGlobalProxy(object);
 
-  // Set up the context (function already in r1).
-  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
-
-  // Jump to the cached code (tail call).
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
-  ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
-  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
-  __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
-                NullCallWrapper(), call_kind);
-
+  GenerateJumpFunction(object, r1, function);
   HandlerFrontendFooter(&miss);
 
   // Return the generated code.
diff --git a/src/array.js b/src/array.js
index e98d7f5..26bf728 100644
--- a/src/array.js
+++ b/src/array.js
@@ -425,6 +425,11 @@
     return;
   }
 
+  if ($Object.isSealed(this)) {
+    throw MakeTypeError("array_functions_change_sealed",
+                        ["Array.prototype.pop"]);
+  }
+
   if (%IsObserved(this))
     return ObservedArrayPop.call(this, n);
 
@@ -462,11 +467,16 @@
                         ["Array.prototype.push"]);
   }
 
+  var n = TO_UINT32(this.length);
+  var m = %_ArgumentsLength();
+  if (m > 0 && $Object.isSealed(this)) {
+    throw MakeTypeError("array_functions_change_sealed",
+                        ["Array.prototype.push"]);
+  }
+
   if (%IsObserved(this))
     return ObservedArrayPush.apply(this, arguments);
 
-  var n = TO_UINT32(this.length);
-  var m = %_ArgumentsLength();
   for (var i = 0; i < m; i++) {
     this[i+n] = %_Arguments(i);
   }
@@ -604,6 +614,11 @@
     return;
   }
 
+  if ($Object.isSealed(this)) {
+    throw MakeTypeError("array_functions_change_sealed",
+                        ["Array.prototype.shift"]);
+  }
+
   if (%IsObserved(this))
     return ObservedArrayShift.call(this, len);
 
@@ -645,15 +660,32 @@
                         ["Array.prototype.unshift"]);
   }
 
+  var len = TO_UINT32(this.length);
+  var num_arguments = %_ArgumentsLength();
+  var is_sealed = $Object.isSealed(this);
+
+  if (num_arguments > 0 && is_sealed) {
+    throw MakeTypeError("array_functions_change_sealed",
+                        ["Array.prototype.unshift"]);
+  }
+
   if (%IsObserved(this))
     return ObservedArrayUnshift.apply(this, arguments);
 
-  var len = TO_UINT32(this.length);
-  var num_arguments = %_ArgumentsLength();
-
-  if (IS_ARRAY(this)) {
+  if (IS_ARRAY(this) && !is_sealed) {
     SmartMove(this, 0, 0, len, num_arguments);
   } else {
+    if (num_arguments == 0 && $Object.isFrozen(this)) {
+      // In the zero argument case, values from the prototype come into the
+      // object. This can't be allowed on frozen arrays.
+      for (var i = 0; i < len; i++) {
+        if (!this.hasOwnProperty(i) && !IS_UNDEFINED(this[i])) {
+          throw MakeTypeError("array_functions_on_frozen",
+                              ["Array.prototype.unshift"]);
+        }
+      }
+    }
+
     SimpleMove(this, 0, 0, len, num_arguments);
   }
 
@@ -663,7 +695,7 @@
 
   this.length = len + num_arguments;
 
-  return len + num_arguments;
+  return this.length;
 }
 
 
@@ -802,6 +834,14 @@
   deleted_elements.length = del_count;
   var num_elements_to_add = num_arguments > 2 ? num_arguments - 2 : 0;
 
+  if (del_count != num_elements_to_add && $Object.isSealed(this)) {
+    throw MakeTypeError("array_functions_change_sealed",
+                        ["Array.prototype.splice"]);
+  } else if (del_count > 0 && $Object.isFrozen(this)) {
+    throw MakeTypeError("array_functions_on_frozen",
+                        ["Array.prototype.splice"]);
+  }
+
   var use_simple_splice = true;
   if (IS_ARRAY(this) &&
       num_elements_to_add !== del_count) {
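
The splice checks above make the sealed and frozen cases differ: a sealed
array may still be spliced as long as its length is unchanged, while any
removal from a frozen array throws. A small sketch of the intended
semantics (values are illustrative):

    var sealed = Object.seal([1, 2, 3]);
    sealed.splice(0, 1, "x");  // OK: one deleted, one added, length kept
    sealed.splice(0, 1);       // TypeError: would shrink a sealed array

    var frozen = Object.freeze([1, 2, 3]);
    frozen.splice(0, 1, "x");  // TypeError: frozen elements cannot change

Note also that ArrayUnshift now returns this.length rather than
len + num_arguments: on a frozen receiver the length store may not take
effect, so the actual length is the only safe return value.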
diff --git a/src/builtins.cc b/src/builtins.cc
index 1b62223..bc6dceb 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -307,6 +307,7 @@
   if (!receiver->IsJSArray()) return NULL;
   JSArray* array = JSArray::cast(receiver);
   if (array->map()->is_observed()) return NULL;
+  if (!array->map()->is_extensible()) return NULL;
   HeapObject* elms = array->elements();
   Map* map = elms->map();
   if (map == heap->fixed_array_map()) {
@@ -1420,16 +1421,6 @@
 }
 
 
-static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
-  StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
-}
-
-
-static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
-  StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
-}
-
-
 static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
   StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
 }
diff --git a/src/builtins.h b/src/builtins.h
index f76ed02..edc13f7 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -157,16 +157,12 @@
                                     kNoExtraICState)                    \
   V(StoreIC_Generic_Strict,         STORE_IC, GENERIC,                  \
                                     StoreIC::kStrictModeState)          \
-  V(StoreIC_GlobalProxy,            STORE_IC, GENERIC,                  \
-                                    kNoExtraICState)                    \
   V(StoreIC_Initialize_Strict,      STORE_IC, UNINITIALIZED,            \
                                     StoreIC::kStrictModeState)          \
   V(StoreIC_PreMonomorphic_Strict,  STORE_IC, PREMONOMORPHIC,           \
                                     StoreIC::kStrictModeState)          \
   V(StoreIC_Megamorphic_Strict,     STORE_IC, MEGAMORPHIC,              \
                                     StoreIC::kStrictModeState)          \
-  V(StoreIC_GlobalProxy_Strict,     STORE_IC, GENERIC,                  \
-                                    StoreIC::kStrictModeState)          \
   V(StoreIC_Setter_ForDeopt,        STORE_IC, MONOMORPHIC,              \
                                     StoreIC::kStrictModeState)          \
                                                                         \
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index f717d75..03b8c18 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -251,9 +251,6 @@
     // Update the static counter each time a new code stub is generated.
     isolate->counters()->code_stubs()->Increment();
 
-    // Nested stubs are not allowed for leaves.
-    AllowStubCallsScope allow_scope(&masm, false);
-
     // Generate the code for the stub.
     masm.set_generating_stub(true);
     NoCurrentFrameScope scope(&masm);
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 87fca37..3f26e3f 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -110,9 +110,6 @@
     // Update the static counter each time a new code stub is generated.
     isolate->counters()->code_stubs()->Increment();
 
-    // Nested stubs are not allowed for leaves.
-    AllowStubCallsScope allow_scope(&masm, false);
-
     // Generate the code for the stub.
     masm.set_generating_stub(true);
     NoCurrentFrameScope scope(&masm);
@@ -148,7 +145,6 @@
   if (UseSpecialCache()
       ? FindCodeInSpecialCache(&code, isolate)
       : FindCodeInCache(&code, isolate)) {
-    ASSERT(IsPregenerated(isolate) == code->is_pregenerated());
     ASSERT(GetCodeKind() == code->kind());
     return Handle<Code>(code);
   }
@@ -978,7 +974,7 @@
 
 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
   CreateAllocationSiteStub stub;
-  stub.GetCode(isolate)->set_is_pregenerated(true);
+  stub.GetCode(isolate);
 }
 
 
@@ -1117,14 +1113,14 @@
 void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
   StubFailureTrampolineStub stub1(NOT_JS_FUNCTION_STUB_MODE);
   StubFailureTrampolineStub stub2(JS_FUNCTION_STUB_MODE);
-  stub1.GetCode(isolate)->set_is_pregenerated(true);
-  stub2.GetCode(isolate)->set_is_pregenerated(true);
+  stub1.GetCode(isolate);
+  stub2.GetCode(isolate);
 }
 
 
 void StubFailureTailCallTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
   StubFailureTailCallTrampolineStub stub;
-  stub.GetCode(isolate)->set_is_pregenerated(true);
+  stub.GetCode(isolate);
 }
 
 
diff --git a/src/code-stubs.h b/src/code-stubs.h
index e20060f..a852a5c 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -161,16 +161,6 @@
 
   virtual ~CodeStub() {}
 
-  bool CompilingCallsToThisStubIsGCSafe(Isolate* isolate) {
-    bool is_pregenerated = IsPregenerated(isolate);
-    Code* code = NULL;
-    CHECK(!is_pregenerated || FindCodeInCache(&code, isolate));
-    return is_pregenerated;
-  }
-
-  // See comment above, where Instanceof is defined.
-  virtual bool IsPregenerated(Isolate* isolate) { return false; }
-
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate);
   static void GenerateFPStubs(Isolate* isolate);
@@ -683,8 +673,6 @@
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
   virtual void InitializeInterfaceDescriptor(
@@ -1467,7 +1455,6 @@
   // time, so it's OK to call it from other stubs that can't cope with GC during
   // their code generation.  On machines that always have gp registers (x64) we
   // can generate both variants ahead of time.
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateAheadOfTime(Isolate* isolate);
 
  protected:
@@ -1819,24 +1806,6 @@
 };
 
 
-class AllowStubCallsScope {
- public:
-  AllowStubCallsScope(MacroAssembler* masm, bool allow)
-       : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
-    masm_->set_allow_stub_calls(allow);
-  }
-  ~AllowStubCallsScope() {
-    masm_->set_allow_stub_calls(previous_allow_);
-  }
-
- private:
-  MacroAssembler* masm_;
-  bool previous_allow_;
-
-  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
-};
-
-
 class KeyedLoadDictionaryElementStub : public HydrogenCodeStub {
  public:
   KeyedLoadDictionaryElementStub() {}
@@ -2084,11 +2053,6 @@
     return ContextCheckModeBits::decode(bit_field_);
   }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE {
-    // We only pre-generate stubs that verify correct context
-    return context_mode() == CONTEXT_CHECK_REQUIRED;
-  }
-
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void InstallDescriptors(Isolate* isolate);
 
@@ -2185,7 +2149,6 @@
     kind_ = kind;
   }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void InstallDescriptors(Isolate* isolate);
 
@@ -2449,8 +2412,6 @@
   explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
       : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
@@ -2476,8 +2437,6 @@
  public:
   StubFailureTailCallTrampolineStub() : fp_registers_(CanUseFPRegisters()) {}
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
diff --git a/src/hydrogen-check-elimination.cc b/src/hydrogen-check-elimination.cc
index 5333221..bf8257c 100644
--- a/src/hydrogen-check-elimination.cc
+++ b/src/hydrogen-check-elimination.cc
@@ -97,6 +97,10 @@
         ReduceCheckMapValue(HCheckMapValue::cast(instr));
         break;
       }
+      case HValue::kCheckHeapObject: {
+        ReduceCheckHeapObject(HCheckHeapObject::cast(instr));
+        break;
+      }
       default: {
         // If the instruction changes maps uncontrollably, drop everything.
         if (instr->CheckGVNFlag(kChangesMaps) ||
@@ -105,7 +109,8 @@
         }
       }
       // Improvements possible:
-      // - eliminate HCheckSmi and HCheckHeapObject
+      // - eliminate redundant HCheckSmi, HCheckInstanceType instructions
+      // - track which values have been HCheckHeapObject'd
     }
 
     return this;
@@ -236,6 +241,14 @@
     }
   }
 
+  void ReduceCheckHeapObject(HCheckHeapObject* instr) {
+    if (FindMaps(instr->value()->ActualValue()) != NULL) {
+      // If the object has known maps, it's definitely a heap object.
+      instr->DeleteAndReplaceWith(instr->value());
+      INC_STAT(removed_cho_);
+    }
+  }
+
   void ReduceStoreNamedField(HStoreNamedField* instr) {
     HValue* object = instr->object()->ActualValue();
     if (instr->has_transition()) {
@@ -488,15 +501,19 @@
 // Are we eliminated yet?
 void HCheckEliminationPhase::PrintStats() {
 #if DEBUG
-  if (redundant_ > 0)      PrintF("  redundant   = %2d\n", redundant_);
-  if (removed_ > 0)        PrintF("  removed     = %2d\n", removed_);
-  if (narrowed_ > 0)       PrintF("  narrowed    = %2d\n", narrowed_);
-  if (loads_ > 0)          PrintF("  loads       = %2d\n", loads_);
-  if (empty_ > 0)          PrintF("  empty       = %2d\n", empty_);
-  if (compares_true_ > 0)  PrintF("  cmp_true    = %2d\n", compares_true_);
-  if (compares_false_ > 0) PrintF("  cmp_false   = %2d\n", compares_false_);
-  if (transitions_ > 0)    PrintF("  transitions = %2d\n", transitions_);
+  #define PRINT_STAT(x) if (x##_ > 0) PrintF(" %-16s = %2d\n", #x, x##_)
+#else
+  #define PRINT_STAT(x)
 #endif
+  PRINT_STAT(redundant);
+  PRINT_STAT(removed);
+  PRINT_STAT(removed_cho);
+  PRINT_STAT(narrowed);
+  PRINT_STAT(loads);
+  PRINT_STAT(empty);
+  PRINT_STAT(compares_true);
+  PRINT_STAT(compares_false);
+  PRINT_STAT(transitions);
 }
 
 } }  // namespace v8::internal
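
The new ReduceCheckHeapObject rule removes heap-object checks on values
whose maps are already known. A hypothetical sketch of the kind of code
that benefits (function and property names are illustrative):

    function f(o) {
      var a = o.x;  // emits CheckMaps(o), which implies o is a heap object
      var b = o.y;  // a CheckHeapObject(o) guarding this load is redundant
      return a + b; // and is now deleted by check elimination
    }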
diff --git a/src/hydrogen-check-elimination.h b/src/hydrogen-check-elimination.h
index f38e615..b429b17 100644
--- a/src/hydrogen-check-elimination.h
+++ b/src/hydrogen-check-elimination.h
@@ -39,16 +39,19 @@
 class HCheckEliminationPhase : public HPhase {
  public:
   explicit HCheckEliminationPhase(HGraph* graph)
-    : HPhase("H_Check Elimination", graph),
-      aliasing_(),
-      redundant_(0),
-      removed_(0),
-      narrowed_(0),
-      loads_(0),
-      empty_(0),
-      compares_true_(0),
-      compares_false_(0),
-      transitions_(0) { }
+      : HPhase("H_Check Elimination", graph), aliasing_() {
+#ifdef DEBUG
+    redundant_ = 0;
+    removed_ = 0;
+    removed_cho_ = 0;
+    narrowed_ = 0;
+    loads_ = 0;
+    empty_ = 0;
+    compares_true_ = 0;
+    compares_false_ = 0;
+    transitions_ = 0;
+#endif
+  }
 
   void Run();
 
@@ -58,14 +61,17 @@
   void PrintStats();
 
   HAliasAnalyzer* aliasing_;
+#ifdef DEBUG
   int redundant_;
   int removed_;
+  int removed_cho_;
   int narrowed_;
   int loads_;
   int empty_;
   int compares_true_;
   int compares_false_;
   int transitions_;
+#endif
 };
 
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 715feca..f0dcf71 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -2952,18 +2952,11 @@
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-          result_size_ == 1;
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   if (Serializer::enabled()) {
@@ -2984,7 +2977,6 @@
     if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
       save_doubles_code = *(save_doubles.GetCode(isolate));
     }
-    save_doubles_code->set_is_pregenerated(true);
     isolate->set_fp_stubs_generated(true);
   }
 }
@@ -2992,8 +2984,7 @@
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  Handle<Code> code = stub.GetCode(isolate);
-  code->set_is_pregenerated(true);
+  stub.GetCode(isolate);
 }
 
 
@@ -5287,90 +5278,13 @@
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(ebx), REG(eax), REG(edi), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
-  { REG(ebx), REG(edi), REG(edx), OMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField and
-  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(edx), REG(ecx), REG(ebx), EMIT_REMEMBERED_SET },
-  // GenerateStoreField calls the stub with two different permutations of
-  // registers.  This is the second.
-  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
-  // StoreIC::GenerateNormal via GenerateDictionaryStore
-  { REG(ebx), REG(edi), REG(edx), EMIT_REMEMBERED_SET },
-  // KeyedStoreIC::GenerateGeneric.
-  { REG(ebx), REG(edx), REG(ecx), EMIT_REMEMBERED_SET},
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(eax), REG(edx), REG(esi), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
-  // StoreArrayLiteralElementStub::Generate
-  { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub and StringAddStub::Generate
-  { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
-  // StringAddStub::Generate
-  { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET},
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub(kDontSaveFPRegs);
-  stub.GetCode(isolate)->set_is_pregenerated(true);
+  stub.GetCode(isolate);
   if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
     StoreBufferOverflowStub stub2(kSaveFPRegs);
-    stub2.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-
-
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
+    stub2.GetCode(isolate);
   }
 }
 
@@ -5703,10 +5617,6 @@
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    // It's always safe to call the entry hook stub, as the hook itself
-    // is not allowed to call back to V8.
-    AllowStubCallsScope allow_stub_calls(masm, true);
-
     ProfileEntryHookStub stub;
     masm->CallStub(&stub);
   }
@@ -5865,12 +5775,12 @@
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
+    stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
         (!FLAG_track_allocation_sites &&
          (kind == initial_kind || kind == initial_holey_kind))) {
       T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
-      stub1.GetCode(isolate)->set_is_pregenerated(true);
+      stub1.GetCode(isolate);
     }
   }
 }
@@ -5892,11 +5802,11 @@
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
     InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
-    stubh1.GetCode(isolate)->set_is_pregenerated(true);
+    stubh1.GetCode(isolate);
     InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
-    stubh2.GetCode(isolate)->set_is_pregenerated(true);
+    stubh2.GetCode(isolate);
     InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
-    stubh3.GetCode(isolate)->set_is_pregenerated(true);
+    stubh3.GetCode(isolate);
   }
 }
 
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index 006651c..1425924 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -74,7 +74,6 @@
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -303,8 +302,6 @@
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static const byte kTwoByteNopInstruction = 0x3c;  // Cmpb al, #imm8.
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index b5cefdb..52d42f6 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -46,7 +46,6 @@
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     // TODO(titzer): should we just use a null handle here instead?
@@ -2188,8 +2187,6 @@
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -2201,8 +2198,7 @@
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
@@ -2634,7 +2630,7 @@
 }
 
 
-void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+void MacroAssembler::InvokeFunction(Register fun,
                                     const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
@@ -2643,18 +2639,25 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  // Get the function and setup the context.
-  LoadHeapObject(edi, function);
+  ASSERT(fun.is(edi));
   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
 
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
              expected, actual, flag, call_wrapper, call_kind);
 }
 
 
+void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+                                    const ParameterCount& expected,
+                                    const ParameterCount& actual,
+                                    InvokeFlag flag,
+                                    const CallWrapper& call_wrapper,
+                                    CallKind call_kind) {
+  LoadHeapObject(edi, function);
+  InvokeFunction(edi, expected, actual, flag, call_wrapper, call_kind);
+}
+
+
 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 2941130..054b164 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -349,6 +349,13 @@
                       const CallWrapper& call_wrapper,
                       CallKind call_kind);
 
+  void InvokeFunction(Register function,
+                      const ParameterCount& expected,
+                      const ParameterCount& actual,
+                      InvokeFlag flag,
+                      const CallWrapper& call_wrapper,
+                      CallKind call_kind);
+
   void InvokeFunction(Handle<JSFunction> function,
                       const ParameterCount& expected,
                       const ParameterCount& actual,
@@ -905,8 +912,6 @@
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -992,7 +997,6 @@
 
  private:
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index d799c3b..ba57d7e 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -645,14 +645,13 @@
 
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
-  CallInterceptorCompiler(StubCompiler* stub_compiler,
+  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                           const ParameterCount& arguments,
                           Register name,
                           ExtraICState extra_state)
       : stub_compiler_(stub_compiler),
         arguments_(arguments),
-        name_(name),
-        extra_state_(extra_state) {}
+        name_(name) {}
 
   void Compile(MacroAssembler* masm,
                Handle<JSObject> object,
@@ -756,13 +755,8 @@
     if (can_do_fast_api_call) {
       GenerateFastApiCall(masm, optimization, arguments_.immediate());
     } else {
-      CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-          ? CALL_AS_FUNCTION
-          : CALL_AS_METHOD;
-      Handle<JSFunction> function = optimization.constant_function();
-      ParameterCount expected(function);
-      __ InvokeFunction(function, expected, arguments_,
-                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
+      Handle<JSFunction> fun = optimization.constant_function();
+      stub_compiler_->GenerateJumpFunctionIgnoreReceiver(fun);
     }
 
     // Deferred code for fast API call case---clean preallocated space.
@@ -830,10 +824,9 @@
     __ j(not_equal, interceptor_succeeded);
   }
 
-  StubCompiler* stub_compiler_;
+  CallStubCompiler* stub_compiler_;
   const ParameterCount& arguments_;
   Register name_;
-  ExtraICState extra_state_;
 };
 
 
@@ -1602,6 +1595,15 @@
 }
 
 
+void CallStubCompiler::GenerateFunctionCheck(Register function,
+                                             Register scratch,
+                                             Label* miss) {
+  __ JumpIfSmi(function, miss);
+  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
+  __ j(not_equal, miss);
+}
+
+
 void CallStubCompiler::GenerateLoadFunctionFromCell(
     Handle<Cell> cell,
     Handle<JSFunction> function,
@@ -1621,9 +1623,7 @@
     // the nice side effect that multiple closures based on the same
     // function can all use this call IC. Before we load through the
     // function, we have to verify that it still is a function.
-    __ JumpIfSmi(edi, miss);
-    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
-    __ j(not_equal, miss);
+    GenerateFunctionCheck(edi, ebx, miss);
 
     // Check the shared function info. Make sure it hasn't changed.
     __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
@@ -1656,20 +1656,7 @@
   GenerateFastPropertyLoad(
       masm(), edi, reg, index.is_inobject(holder),
       index.translate(holder), Representation::Tagged());
-
-  // Check that the function really is a function.
-  __ JumpIfSmi(edi, &miss);
-  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
-  __ j(not_equal, &miss);
-
-  PatchGlobalProxy(object);
-
-  // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
-                    NullCallWrapper(), call_kind);
+  GenerateJumpFunction(object, edi, &miss);
 
   HandlerFrontendFooter(&miss);
 
@@ -1718,10 +1705,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If object is not an array, is observed, or is not extensible, bail out
+  // to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -1961,10 +1950,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If object is not an array, is observed, or is not extensible, bail out
+  // to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -2081,8 +2072,7 @@
   __ bind(&miss);
   // Restore function name in ecx.
   __ Set(ecx, Immediate(name));
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2150,8 +2140,7 @@
   __ bind(&miss);
   // Restore function name in ecx.
   __ Set(ecx, Immediate(name));
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2200,15 +2189,10 @@
   StubRuntimeCallHelper call_helper;
   generator.GenerateSlow(masm(), call_helper);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2315,12 +2299,10 @@
   __ mov(eax, Operand(esp, 1 * kPointerSize));
   __ ret(2 * kPointerSize);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2404,12 +2386,10 @@
   __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
   __ ret(2 * kPointerSize);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2469,8 +2449,7 @@
   __ bind(&miss);
   __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
 
-  __ bind(&miss_before_stack_reserved);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss_before_stack_reserved);
 
   // Return the generated code.
   return GetCode(function);
@@ -2579,40 +2558,18 @@
 }
 
 
-void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Register function,
+                                            Label* miss) {
+  // Check that the function really is a function.
+  GenerateFunctionCheck(function, ebx, miss);
 
-
-Handle<Code> CallStubCompiler::CompileCallConstant(
-    Handle<Object> object,
-    Handle<JSObject> holder,
-    Handle<Name> name,
-    CheckType check,
-    Handle<JSFunction> function) {
-
-  if (HasCustomCallGenerator(function)) {
-    Handle<Code> code = CompileCustomCall(object, holder,
-                                          Handle<Cell>::null(),
-                                          function, Handle<String>::cast(name),
-                                          Code::FAST);
-    // A null handle means bail out to the regular compiler code below.
-    if (!code.is_null()) return code;
-  }
-
-  Label miss;
-  HandlerFrontendHeader(object, holder, name, check, &miss);
+  if (!function.is(edi)) __ mov(edi, function);
   PatchGlobalProxy(object);
-  CompileHandlerBackend(function);
-  HandlerFrontendFooter(&miss);
 
-  // Return the generated code.
-  return GetCode(function);
+  // Invoke the function.
+  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
+                    NullCallWrapper(), call_kind());
 }
 
 
@@ -2639,29 +2596,9 @@
   // Restore receiver.
   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
 
-  // Check that the function really is a function.
-  __ JumpIfSmi(eax, &miss);
-  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
-  __ j(not_equal, &miss);
+  GenerateJumpFunction(object, eax, &miss);
 
-  // Patch the receiver on the stack with the global proxy if
-  // necessary.
-  if (object->IsGlobalObject()) {
-    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
-    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
-  }
-
-  // Invoke the function.
-  __ mov(edi, eax);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
-                    NullCallWrapper(), call_kind);
-
-  // Handle load cache miss.
-  __ bind(&miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss);
 
   // Return the generated code.
   return GetCode(Code::FAST, name);
@@ -2684,25 +2621,10 @@
 
   Label miss;
   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+  // This can load a closure that merely matches the shared function info of
+  // |function|, rather than |function| itself.
   GenerateLoadFunctionFromCell(cell, function, &miss);
-  PatchGlobalProxy(object);
-
-  // Set up the context (function already in edi).
-  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
-
-  // Jump to the cached code (tail call).
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->call_global_inline(), 1);
-  ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
-  __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
-                expected, arguments(), JUMP_FUNCTION,
-                NullCallWrapper(), call_kind);
+  GenerateJumpFunction(object, edi, function);
 
   HandlerFrontendFooter(&miss);
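
Note: the cell load above only re-checks the shared function info, so the
stub keeps working when a different closure with the same SharedFunctionInfo
is installed in the global cell; GenerateFunctionCheck still guards against
the cell holding a non-function. A hedged sketch:

    function g() { return 1; }
    g();     // the call IC caches a stub keyed on g's shared function info
    g = 42;  // the global cell no longer holds a JSFunction
    g();     // the function check misses and the runtime throws a TypeError
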
 
diff --git a/src/ic.cc b/src/ic.cc
index 46820fe..6ee728a 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -1454,7 +1454,8 @@
     }
 
     if (lookup->IsPropertyCallbacks()) return true;
-
+    // JSGlobalProxy stores always go via the runtime, so caching is safe.
+    if (receiver->IsJSGlobalProxy()) return true;
     // Currently normal holders in the prototype chain are not supported. They
     // would require a runtime positive lookup and verification that the details
     // have not changed.
@@ -1561,20 +1562,6 @@
     return *result;
   }
 
-  if (receiver->IsJSGlobalProxy()) {
-    if (FLAG_use_ic && kind() != Code::KEYED_STORE_IC) {
-      // Generate a generic stub that goes to the runtime when we see a global
-      // proxy as receiver.
-      Handle<Code> stub = global_proxy_stub();
-      set_target(*stub);
-      TRACE_IC("StoreIC", name);
-    }
-    Handle<Object> result = JSReceiver::SetProperty(
-        receiver, name, value, NONE, strict_mode(), store_mode);
-    RETURN_IF_EMPTY_HANDLE(isolate(), result);
-    return *result;
-  }
-
   LookupResult lookup(isolate());
   bool can_store = LookupForWrite(receiver, name, value, &lookup, this);
   if (!can_store &&
@@ -1611,7 +1598,6 @@
                            Handle<JSObject> receiver,
                            Handle<String> name,
                            Handle<Object> value) {
-  ASSERT(!receiver->IsJSGlobalProxy());
   ASSERT(lookup->IsFound());
 
   // These are not cacheable, so we never see such LookupResults here.
@@ -1629,6 +1615,7 @@
                                      Handle<String> name,
                                      Handle<Object> value,
                                      InlineCacheHolderFlag cache_holder) {
+  if (object->IsJSGlobalProxy()) return slow_stub();
   ASSERT(cache_holder == OWN_MAP);
   // This is currently guaranteed by checks in StoreIC::Store.
   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
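
Note: with the dedicated global proxy stub removed, stores whose receiver is
a JSGlobalProxy now take the generic slow stub, and LookupForWrite may treat
them as cacheable because such stores always reach the runtime. A hedged
sketch of such a store (in sloppy mode, top-level this is the global proxy):

    this.counter = 0;   // StoreIC sees a JSGlobalProxy receiver
    this.counter += 1;  // subsequent stores reuse the cached slow handler
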
diff --git a/src/ic.h b/src/ic.h
index 3db352a..b1a47e2 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -642,14 +642,6 @@
     }
   }
 
-  virtual Handle<Code> global_proxy_stub() {
-    if (strict_mode() == kStrictMode) {
-      return isolate()->builtins()->StoreIC_GlobalProxy_Strict();
-    } else {
-      return isolate()->builtins()->StoreIC_GlobalProxy();
-    }
-  }
-
   // Update the inline cache and the global stub cache based on the
   // lookup result.
   void UpdateCaches(LookupResult* lookup,
diff --git a/src/messages.js b/src/messages.js
index 085b4d8..c709672 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -111,6 +111,8 @@
   constructor_not_function:      ["Constructor ", "%0", " requires 'new'"],
   not_a_promise:                 ["%0", "is not a promise"],
   promise_cyclic:                ["Chaining cycle detected for promise", "%0"],
+  array_functions_on_frozen:     ["Cannot modify frozen array elements"],
+  array_functions_change_sealed: ["Cannot add/remove sealed array elements"],
   // RangeError
   invalid_array_length:          ["Invalid array length"],
   invalid_array_buffer_length:   ["Invalid array buffer length"],
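
A hedged sketch of where the two new message templates surface (assuming the
frozen check is taken before the sealed one):

    var frozen = Object.freeze([1, 2]);
    frozen.pop();       // TypeError: Cannot modify frozen array elements
    var sealed = Object.seal([1, 2]);
    sealed.unshift(0);  // TypeError: Cannot add/remove sealed array elements
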
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 0c5e35c..bd42161 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -714,27 +714,6 @@
 }
 
 
-bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
-  // These variants are compiled ahead of time.  See next method.
-  if (the_int_.is(a1) &&
-      the_heap_number_.is(v0) &&
-      scratch_.is(a2) &&
-      sign_.is(a3)) {
-    return true;
-  }
-  if (the_int_.is(a2) &&
-      the_heap_number_.is(v0) &&
-      scratch_.is(a3) &&
-      sign_.is(a0)) {
-    return true;
-  }
-  // Other register combinations are generated as and when they are needed,
-  // so it is unsafe to call them from stubs (we can't generate a stub while
-  // we are generating a stub).
-  return false;
-}
-
-
 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
@@ -1749,18 +1728,11 @@
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-          result_size_ == 1;
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -5506,68 +5478,6 @@
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
-  // Also used in KeyedStoreIC::GenerateGeneric.
-  { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
-  { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
-  // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
-  { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
-  // StoreArrayLiteralElementStub::Generate
-  { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
-  // FastNewClosureStub::Generate
-  { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
-  // StringAddStub::Generate
-  { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET },
-  { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET },
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -5578,20 +5488,6 @@
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-
-
 bool CodeStub::CanUseFPRegisters() {
   return true;  // FPU is a base requirement for V8.
 }
@@ -5898,7 +5794,6 @@
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    AllowStubCallsScope allow_stub_calls(masm, true);
     ProfileEntryHookStub stub;
     __ push(ra);
     __ CallStub(&stub);
diff --git a/src/mips/code-stubs-mips.h b/src/mips/code-stubs-mips.h
index 10531a8..c3e05b8 100644
--- a/src/mips/code-stubs-mips.h
+++ b/src/mips/code-stubs-mips.h
@@ -69,7 +69,6 @@
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -240,7 +239,6 @@
     ASSERT(SignRegisterBits::is_valid(sign_.code()));
   }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
 
  private:
@@ -291,8 +289,6 @@
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 0407f01..a56fbb1 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -631,11 +631,13 @@
   Label done;
   __ bind(materialize_true);
   __ LoadRoot(at, Heap::kTrueValueRootIndex);
+  // Push the value: the branch below may clobber 'at' in long branch mode.
+  __ push(at);
   __ Branch(&done);
   __ bind(materialize_false);
   __ LoadRoot(at, Heap::kFalseValueRootIndex);
-  __ bind(&done);
   __ push(at);
+  __ bind(&done);
 }
 
 
@@ -3734,6 +3736,7 @@
     VisitForAccumulatorValue(args->at(1));
 
     __ pop(a1);
+    __ mov(a0, result_register());  // NewStringAddStub requires args in a0, a1.
     NewStringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
     __ CallStub(&stub);
   } else {
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 71bacba..3fb08ef 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -4708,13 +4708,12 @@
     LNumberTagU* instr_;
   };
 
-  LOperand* input = instr->value();
-  ASSERT(input->IsRegister() && input->Equals(instr->result()));
-  Register reg = ToRegister(input);
+  Register input = ToRegister(instr->value());
+  Register result = ToRegister(instr->result());
 
   DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
-  __ Branch(deferred->entry(), hi, reg, Operand(Smi::kMaxValue));
-  __ SmiTag(reg, reg);
+  __ Branch(deferred->entry(), hi, input, Operand(Smi::kMaxValue));
+  __ SmiTag(result, input);
   __ bind(deferred->exit());
 }
 
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index 1a99bb9..4b161d6 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -1940,7 +1940,7 @@
       LOperand* value = UseRegisterAtStart(val);
       if (val->CheckFlag(HInstruction::kUint32)) {
         LNumberTagU* result = new(zone()) LNumberTagU(value);
-        return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
       } else if (val->HasRange() && val->range()->IsInSmiRange()) {
         return DefineAsRegister(new(zone()) LSmiTag(value));
       } else {
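
Note: together with the lithium-codegen change above, the result of
LNumberTagU no longer has to alias its input, so the untagged value stays
available on the deferred path. A hedged JavaScript sketch that exercises
that path (uint32 values above Smi::kMaxValue cannot be tagged as Smis on
32-bit MIPS):

    function toUint32(x) { return x >>> 0; }
    toUint32(1);   // fits in a Smi and is tagged inline
    toUint32(-1);  // 4294967295 does not fit; boxed as a HeapNumber, deferred
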
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 04aa9c9..f33e6fa 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -44,7 +44,6 @@
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
@@ -3728,7 +3727,7 @@
 }
 
 
-void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+void MacroAssembler::InvokeFunction(Register function,
                                     const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
@@ -3737,8 +3736,10 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
+  // Contract with called JS functions requires that function is passed in a1.
+  ASSERT(function.is(a1));
+
   // Get the function and setup the context.
-  li(a1, function);
   lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
 
   // We call indirectly through the code field in the function to
@@ -3749,6 +3750,17 @@
 }
 
 
+void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+                                    const ParameterCount& expected,
+                                    const ParameterCount& actual,
+                                    InvokeFlag flag,
+                                    const CallWrapper& call_wrapper,
+                                    CallKind call_kind) {
+  li(a1, function);
+  InvokeFunction(a1, expected, actual, flag, call_wrapper, call_kind);
+}
+
+
 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                           Register map,
                                           Register scratch,
@@ -3873,8 +3885,6 @@
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -4014,8 +4024,7 @@
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index e4187c0..4e30c35 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -917,6 +917,13 @@
                       const CallWrapper& call_wrapper,
                       CallKind call_kind);
 
+  void InvokeFunction(Register function,
+                      const ParameterCount& expected,
+                      const ParameterCount& actual,
+                      InvokeFlag flag,
+                      const CallWrapper& call_wrapper,
+                      CallKind call_kind);
+
   void InvokeFunction(Handle<JSFunction> function,
                       const ParameterCount& expected,
                       const ParameterCount& actual,
@@ -1341,8 +1348,6 @@
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1639,7 +1644,6 @@
   MemOperand SafepointRegistersAndDoublesSlot(Register reg);
 
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 748a783..ea1c9a5 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -732,35 +732,6 @@
 }
 
 
-static void GenerateCallFunction(MacroAssembler* masm,
-                                 Handle<Object> object,
-                                 const ParameterCount& arguments,
-                                 Label* miss,
-                                 ExtraICState extra_ic_state) {
-  // ----------- S t a t e -------------
-  //  -- a0: receiver
-  //  -- a1: function to call
-  // -----------------------------------
-  // Check that the function really is a function.
-  __ JumpIfSmi(a1, miss);
-  __ GetObjectType(a1, a3, a3);
-  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
-
-  if (object->IsGlobalObject()) {
-    const int argc = arguments.immediate();
-    const int receiver_offset = argc * kPointerSize;
-    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
-    __ sw(a3, MemOperand(sp, receiver_offset));
-  }
-
-  // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
-
-
 static void PushInterceptorArguments(MacroAssembler* masm,
                                      Register receiver,
                                      Register holder,
@@ -942,7 +913,7 @@
 
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
-  CallInterceptorCompiler(StubCompiler* stub_compiler,
+  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                           const ParameterCount& arguments,
                           Register name,
                           ExtraICState extra_ic_state)
@@ -1054,13 +1025,8 @@
       GenerateFastApiDirectCall(
           masm, optimization, arguments_.immediate(), false);
     } else {
-      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
-          ? CALL_AS_FUNCTION
-          : CALL_AS_METHOD;
       Handle<JSFunction> function = optimization.constant_function();
-      ParameterCount expected(function);
-      __ InvokeFunction(function, expected, arguments_,
-                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
+      stub_compiler_->GenerateJumpFunctionIgnoreReceiver(function);
     }
 
     // Deferred code for fast API call case---clean preallocated space.
@@ -1126,7 +1092,7 @@
     __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
   }
 
-  StubCompiler* stub_compiler_;
+  CallStubCompiler* stub_compiler_;
   const ParameterCount& arguments_;
   Register name_;
   ExtraICState extra_ic_state_;
@@ -1527,6 +1493,15 @@
 }
 
 
+void CallStubCompiler::GenerateFunctionCheck(Register function,
+                                             Register scratch,
+                                             Label* miss) {
+  __ JumpIfSmi(function, miss);
+  __ GetObjectType(function, scratch, scratch);
+  __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
+}
+
+
 void CallStubCompiler::GenerateLoadFunctionFromCell(
     Handle<Cell> cell,
     Handle<JSFunction> function,
@@ -1542,9 +1517,7 @@
     // the nice side effect that multiple closures based on the same
     // function can all use this call IC. Before we load through the
     // function, we have to verify that it still is a function.
-    __ JumpIfSmi(a1, miss);
-    __ GetObjectType(a1, a3, a3);
-    __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
+    GenerateFunctionCheck(a1, a3, miss);
 
     // Check the shared function info. Make sure it hasn't changed.
     __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
@@ -1575,8 +1548,7 @@
       object, holder, name, RECEIVER_MAP_CHECK, &miss);
   GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
                            index.translate(holder), Representation::Tagged());
-
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
+  GenerateJumpFunction(object, a1, &miss);
 
   HandlerFrontendFooter(&miss);
 
@@ -1625,10 +1597,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If the object is not an array, or is observed or not extensible (sealed
+  // or frozen), bail out to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -1867,10 +1841,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If the object is not an array, or is observed or not extensible (sealed
+  // or frozen), bail out to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -1988,8 +1964,7 @@
   __ bind(&miss);
   // Restore function name in a2.
   __ li(a2, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2053,8 +2028,7 @@
   __ bind(&miss);
   // Restore function name in a2.
   __ li(a2, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2100,12 +2074,10 @@
   StubRuntimeCallHelper call_helper;
   generator.GenerateSlow(masm(), call_helper);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2211,11 +2183,9 @@
   __ ctc1(a3, FCSR);
 
   __ bind(&slow);
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2292,12 +2262,10 @@
   __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
   __ DropAndRet(argc + 1);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2352,8 +2320,7 @@
   __ bind(&miss);
   FreeSpaceForFastApiCall(masm());
 
-  __ bind(&miss_before_stack_reserved);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss_before_stack_reserved);
 
   // Return the generated code.
   return GetCode(function);
@@ -2468,39 +2435,16 @@
 }
 
 
-void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
-
-
-Handle<Code> CallStubCompiler::CompileCallConstant(
-    Handle<Object> object,
-    Handle<JSObject> holder,
-    Handle<Name> name,
-    CheckType check,
-    Handle<JSFunction> function) {
-  if (HasCustomCallGenerator(function)) {
-    Handle<Code> code = CompileCustomCall(object, holder,
-                                          Handle<Cell>::null(),
-                                          function, Handle<String>::cast(name),
-                                          Code::FAST);
-    // A null handle means bail out to the regular compiler code below.
-    if (!code.is_null()) return code;
-  }
-
-  Label miss;
-  HandlerFrontendHeader(object, holder, name, check, &miss);
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Register function,
+                                            Label* miss) {
+  ASSERT(function.is(a1));
+  // Check that the function really is a function.
+  GenerateFunctionCheck(function, a3, miss);
   PatchGlobalProxy(object);
-  CompileHandlerBackend(function);
-  HandlerFrontendFooter(&miss);
-
-  // Return the generated code.
-  return GetCode(function);
+  // Invoke the function.
+  __ InvokeFunction(a1, arguments(), JUMP_FUNCTION,
+                    NullCallWrapper(), call_kind());
 }
 
 
@@ -2528,11 +2472,9 @@
   // Restore receiver.
   __ lw(a0, MemOperand(sp, argc * kPointerSize));
 
-  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
+  GenerateJumpFunction(object, a1, &miss);
 
-  // Handle call cache miss.
-  __ bind(&miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss);
 
   // Return the generated code.
   return GetCode(Code::FAST, name);
@@ -2555,26 +2497,12 @@
 
   Label miss;
   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+  // This can load a closure that merely matches the shared function info of
+  // |function|, rather than |function| itself.
   GenerateLoadFunctionFromCell(cell, function, &miss);
-  PatchGlobalProxy(object);
-
-  // Set up the context (function already in r1).
-  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
-
-  // Jump to the cached code (tail call).
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
-  ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
-  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
-  __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
-                NullCallWrapper(), call_kind);
-
+  GenerateJumpFunction(object, a1, function);
   HandlerFrontendFooter(&miss);
 
   // Return the generated code.
diff --git a/src/objects-inl.h b/src/objects-inl.h
index ee81c2f..1717a5f 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3959,19 +3959,6 @@
 }
 
 
-bool Code::is_pregenerated() {
-  return (kind() == STUB && IsPregeneratedField::decode(flags()));
-}
-
-
-void Code::set_is_pregenerated(bool value) {
-  ASSERT(kind() == STUB);
-  Flags f = flags();
-  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
-  set_flags(f);
-}
-
-
 bool Code::optimizable() {
   ASSERT_EQ(FUNCTION, kind());
   return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 512f532..381c9aa 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -556,6 +556,11 @@
   if (is_access_check_needed()) {
     PrintF(out, " - access_check_needed\n");
   }
+  if (is_frozen()) {
+    PrintF(out, " - frozen\n");
+  } else if (!is_extensible()) {
+    PrintF(out, " - sealed\n");
+  }
   PrintF(out, " - back pointer: ");
   GetBackPointer()->ShortPrint(out);
   PrintF(out, "\n - instance descriptors %s#%i: ",
diff --git a/src/objects.h b/src/objects.h
index db92ecb..bbdefc1 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -5159,11 +5159,6 @@
   bool is_crankshafted();
   inline void set_is_crankshafted(bool value);
 
-  // For stubs, tells whether they should always exist, so that they can be
-  // called from other stubs.
-  inline bool is_pregenerated();
-  inline void set_is_pregenerated(bool value);
-
   // [optimizable]: For FUNCTION kind, tells if it is optimizable.
   inline bool optimizable();
   inline void set_optimizable(bool value);
@@ -5451,7 +5446,7 @@
   class TypeField: public BitField<StubType, 3, 1> {};
   class CacheHolderField: public BitField<InlineCacheHolderFlag, 5, 1> {};
   class KindField: public BitField<Kind, 6, 4> {};
-  class IsPregeneratedField: public BitField<bool, 10, 1> {};
+  // TODO(bmeurer): Bit 10 is available for free use. :-)
   class ExtraICStateField: public BitField<ExtraICState, 11, 6> {};
   class ExtendedExtraICStateField: public BitField<ExtraICState, 11,
       PlatformSmiTagging::kSmiValueSize - 11 + 1> {};  // NOLINT
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 5121cad..cb68b89 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -1128,12 +1128,69 @@
 #define __ ACCESS_MASM(masm())
 
 
+CallKind CallStubCompiler::call_kind() {
+  return CallICBase::Contextual::decode(extra_state_)
+      ? CALL_AS_FUNCTION
+      : CALL_AS_METHOD;
+}
+
+
 void CallStubCompiler::HandlerFrontendFooter(Label* miss) {
   __ bind(miss);
   GenerateMissBranch();
 }
 
 
+void CallStubCompiler::GenerateJumpFunctionIgnoreReceiver(
+    Handle<JSFunction> function) {
+  ParameterCount expected(function);
+  __ InvokeFunction(function, expected, arguments(),
+                    JUMP_FUNCTION, NullCallWrapper(), call_kind());
+}
+
+
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Handle<JSFunction> function) {
+  PatchGlobalProxy(object);
+  GenerateJumpFunctionIgnoreReceiver(function);
+}
+
+
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Register actual_closure,
+                                            Handle<JSFunction> function) {
+  PatchGlobalProxy(object);
+  ParameterCount expected(function);
+  __ InvokeFunction(actual_closure, expected, arguments(),
+                    JUMP_FUNCTION, NullCallWrapper(), call_kind());
+}
+
+
+Handle<Code> CallStubCompiler::CompileCallConstant(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Name> name,
+    CheckType check,
+    Handle<JSFunction> function) {
+  if (HasCustomCallGenerator(function)) {
+    Handle<Code> code = CompileCustomCall(object, holder,
+                                          Handle<Cell>::null(),
+                                          function, Handle<String>::cast(name),
+                                          Code::FAST);
+    // A null handle means bail out to the regular compiler code below.
+    if (!code.is_null()) return code;
+  }
+
+  Label miss;
+  HandlerFrontendHeader(object, holder, name, check, &miss);
+  GenerateJumpFunction(object, function);
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(function);
+}
+
+
 Register LoadStubCompiler::HandlerFrontendHeader(
     Handle<Type> type,
     Register object_reg,
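
Note: call_kind() centralizes the contextual-versus-method decision that each
architecture used to decode from extra_state_ inline. A hedged JavaScript
sketch of the distinction (sloppy mode assumed):

    function whoAmI() { return this; }
    var o = { whoAmI: whoAmI };
    whoAmI();    // contextual call, CALL_AS_FUNCTION: this is the global object
    o.whoAmI();  // property call, CALL_AS_METHOD: this is o
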
diff --git a/src/stub-cache.h b/src/stub-cache.h
index c9510a8..c70b1ff 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -919,7 +919,17 @@
                                  Label* miss);
   void HandlerFrontendFooter(Label* miss);
 
-  void CompileHandlerBackend(Handle<JSFunction> function);
+  void GenerateJumpFunctionIgnoreReceiver(Handle<JSFunction> function);
+  void GenerateJumpFunction(Handle<Object> object,
+                            Handle<JSFunction> function);
+  void GenerateJumpFunction(Handle<Object> object,
+                            Register function,
+                            Label* miss);
+  // Used to call |actual_closure|, a closure with the same shared function
+  // info as |function|.
+  void GenerateJumpFunction(Handle<Object> object,
+                            Register actual_closure,
+                            Handle<JSFunction> function);
 
   Handle<Code> CompileCallConstant(Handle<Object> object,
                                    Handle<JSObject> holder,
@@ -968,6 +978,8 @@
                                   Handle<JSFunction> function,
                                   Handle<String> name);
 
+  CallKind call_kind();
+
   Handle<Code> GetCode(Code::StubType type, Handle<Name> name);
   Handle<Code> GetCode(Handle<JSFunction> function);
 
@@ -981,6 +993,8 @@
                                     Handle<JSFunction> function,
                                     Label* miss);
 
+  void GenerateFunctionCheck(Register function, Register scratch, Label* miss);
+
   // Generates a jump to CallIC miss stub.
   void GenerateMissBranch();
 
diff --git a/src/type-info.cc b/src/type-info.cc
index 79134ca..e4eb7db 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -522,11 +522,7 @@
   Handle<Object> object = GetInfo(ast_id);
   if (object->IsUndefined() || object->IsSmi()) return;
 
-  if (object.is_identical_to(isolate_->builtins()->StoreIC_GlobalProxy())) {
-    // TODO(fschneider): We could collect the maps and signal that
-    // we need a generic store (or load) here.
-    ASSERT(Handle<Code>::cast(object)->ic_state() == GENERIC);
-  } else if (object->IsMap()) {
+  if (object->IsMap()) {
     types->AddMapIfMissing(Handle<Map>::cast(object), zone());
   } else if (Handle<Code>::cast(object)->ic_state() == POLYMORPHIC ||
              Handle<Code>::cast(object)->ic_state() == MONOMORPHIC) {
diff --git a/src/version.cc b/src/version.cc
index e4705d2..d4f7feb 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // system so their names cannot be changed without changing the scripts.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     23
-#define BUILD_NUMBER      15
+#define BUILD_NUMBER      16
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 4fb3a70..6f35a21 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2781,21 +2781,11 @@
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-#ifdef _WIN64
-  return result_size_ == 1;
-#else
-  return true;
-#endif
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -2808,9 +2798,9 @@
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  stub.GetCode(isolate)->set_is_pregenerated(true);
+  stub.GetCode(isolate);
   CEntryStub save_doubles(1, kSaveFPRegs);
-  save_doubles.GetCode(isolate)->set_is_pregenerated(true);
+  save_doubles.GetCode(isolate);
 }
 
 
@@ -5085,91 +5075,12 @@
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(rbx), REG(rax), REG(rdi), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField and
-  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(rdx), REG(rcx), REG(rbx), EMIT_REMEMBERED_SET },
-  // GenerateStoreField calls the stub with two different permutations of
-  // registers.  This is the second.
-  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
-  // StoreIC::GenerateNormal via GenerateDictionaryStore.
-  { REG(rbx), REG(r8), REG(r9), EMIT_REMEMBERED_SET },
-  // KeyedStoreIC::GenerateGeneric.
-  { REG(rbx), REG(rdx), REG(rcx), EMIT_REMEMBERED_SET},
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET},
-  { REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET},
-  { REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
-  // StoreArrayLiteralElementStub::Generate
-  { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub::Generate and
-  // StringAddStub::Generate
-  { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
-  // StringAddStub::Generate
-  { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET},
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
-  stub1.GetCode(isolate)->set_is_pregenerated(true);
+  stub1.GetCode(isolate);
   StoreBufferOverflowStub stub2(kSaveFPRegs);
-  stub2.GetCode(isolate)->set_is_pregenerated(true);
-}
-
-
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
+  stub2.GetCode(isolate);
 }
 
 
@@ -5496,10 +5407,6 @@
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    // It's always safe to call the entry hook stub, as the hook itself
-    // is not allowed to call back to V8.
-    AllowStubCallsScope allow_stub_calls(masm, true);
-
     ProfileEntryHookStub stub;
     masm->CallStub(&stub);
   }
@@ -5667,12 +5574,12 @@
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
+    stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
         (!FLAG_track_allocation_sites &&
          (kind == initial_kind || kind == initial_holey_kind))) {
       T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
-      stub1.GetCode(isolate)->set_is_pregenerated(true);
+      stub1.GetCode(isolate);
     }
   }
 }
@@ -5694,11 +5601,11 @@
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
     InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
-    stubh1.GetCode(isolate)->set_is_pregenerated(true);
+    stubh1.GetCode(isolate);
     InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
-    stubh2.GetCode(isolate)->set_is_pregenerated(true);
+    stubh2.GetCode(isolate);
     InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
-    stubh3.GetCode(isolate)->set_is_pregenerated(true);
+    stubh3.GetCode(isolate);
   }
 }
 
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index c76abcf..7a3f6a6 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -69,7 +69,6 @@
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -293,8 +292,6 @@
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static const byte kTwoByteNopInstruction = 0x3c;  // Cmpb al, #imm8.
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 49629c7..6c3f501 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -45,7 +45,6 @@
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false),
       root_array_available_(true) {
   if (isolate() != NULL) {
@@ -555,8 +554,6 @@
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -568,8 +565,7 @@
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
@@ -1049,14 +1045,7 @@
   if (emit_debug_code()) {
     movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64);
     cmpq(dst, kSmiConstantRegister);
-    if (allow_stub_calls()) {
-      Assert(equal, kUninitializedKSmiConstantRegister);
-    } else {
-      Label ok;
-      j(equal, &ok, Label::kNear);
-      int3();
-      bind(&ok);
-    }
+    Assert(equal, kUninitializedKSmiConstantRegister);
   }
   int value = source->value();
   if (value == 0) {
@@ -1117,11 +1106,7 @@
     testb(dst, Immediate(0x01));
     Label ok;
     j(zero, &ok, Label::kNear);
-    if (allow_stub_calls()) {
-      Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
-    } else {
-      int3();
-    }
+    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
     bind(&ok);
   }
   ASSERT(kSmiShift % kBitsPerByte == 0);
@@ -2182,10 +2167,8 @@
   ASSERT(!dst.is(src2));
   // Both operands must not be smis.
 #ifdef DEBUG
-  if (allow_stub_calls()) {  // Check contains a stub call.
-    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
-    Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
-  }
+  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
+  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
 #endif
   STATIC_ASSERT(kSmiTag == 0);
   ASSERT_EQ(0, Smi::FromInt(0));
@@ -3601,7 +3584,7 @@
 }
 
 
-void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+void MacroAssembler::InvokeFunction(Register function,
                                     const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
@@ -3610,18 +3593,27 @@
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  // Get the function and setup the context.
-  Move(rdi, function);
-  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
+  ASSERT(function.is(rdi));
+  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
+  // Load the code entry into rdx: the address just past the Code object
+  // header, i.e. the start of the executable code.
   movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+
   InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
 }
 
 
+void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+                                    const ParameterCount& expected,
+                                    const ParameterCount& actual,
+                                    InvokeFlag flag,
+                                    const CallWrapper& call_wrapper,
+                                    CallKind call_kind) {
+  Move(rdi, function);
+  InvokeFunction(rdi, expected, actual, flag, call_wrapper, call_kind);
+}
+
+
 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     Handle<Code> code_constant,
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 06b2f80..98808a8 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -371,6 +371,13 @@
                       const CallWrapper& call_wrapper,
                       CallKind call_kind);
 
+  void InvokeFunction(Register function,
+                      const ParameterCount& expected,
+                      const ParameterCount& actual,
+                      InvokeFlag flag,
+                      const CallWrapper& call_wrapper,
+                      CallKind call_kind);
+
   void InvokeFunction(Handle<JSFunction> function,
                       const ParameterCount& expected,
                       const ParameterCount& actual,
@@ -1393,8 +1400,6 @@
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1444,7 +1449,6 @@
   static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
 
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   bool root_array_available_;
 
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 192f7ba..e7bbe27 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -637,7 +637,7 @@
 
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
-  CallInterceptorCompiler(StubCompiler* stub_compiler,
+  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
                           const ParameterCount& arguments,
                           Register name,
                           ExtraICState extra_ic_state)
@@ -748,13 +748,8 @@
     if (can_do_fast_api_call) {
       GenerateFastApiCall(masm, optimization, arguments_.immediate());
     } else {
-      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
-          ? CALL_AS_FUNCTION
-          : CALL_AS_METHOD;
       Handle<JSFunction> fun = optimization.constant_function();
-      ParameterCount expected(fun);
-      __ InvokeFunction(fun, expected, arguments_,
-                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
+      stub_compiler_->GenerateJumpFunctionIgnoreReceiver(fun);
     }
 
     // Deferred code for fast API call case---clean preallocated space.
@@ -822,7 +817,7 @@
     __ j(not_equal, interceptor_succeeded);
   }
 
-  StubCompiler* stub_compiler_;
+  CallStubCompiler* stub_compiler_;
   const ParameterCount& arguments_;
   Register name_;
   ExtraICState extra_ic_state_;
@@ -1537,6 +1532,15 @@
 }
 
 
+void CallStubCompiler::GenerateFunctionCheck(Register function,
+                                             Register scratch,
+                                             Label* miss) {
+  __ JumpIfSmi(function, miss);
+  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
+  __ j(not_equal, miss);
+}
+
+
 void CallStubCompiler::GenerateLoadFunctionFromCell(
     Handle<Cell> cell,
     Handle<JSFunction> function,
@@ -1552,9 +1556,7 @@
     // the nice side effect that multiple closures based on the same
     // function can all use this call IC. Before we load through the
     // function, we have to verify that it still is a function.
-    __ JumpIfSmi(rdi, miss);
-    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
-    __ j(not_equal, miss);
+    GenerateFunctionCheck(rdi, rax, miss);
 
     // Check the shared function info. Make sure it hasn't changed.
     __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
@@ -1586,20 +1588,7 @@
 
   GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder),
                            index.translate(holder), Representation::Tagged());
-
-  // Check that the function really is a function.
-  __ JumpIfSmi(rdi, &miss);
-  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
-  __ j(not_equal, &miss);
-
-  PatchGlobalProxy(object);
-
-  // Invoke the function.
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
-                    NullCallWrapper(), call_kind);
+  GenerateJumpFunction(object, rdi, &miss);
 
   HandlerFrontendFooter(&miss);
 
@@ -1648,18 +1637,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // ----------- S t a t e -------------
-  //  -- rcx                 : name
-  //  -- rsp[0]              : return address
-  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
-  //  -- ...
-  //  -- rsp[(argc + 1) * 8] : receiver
-  // -----------------------------------
-
-  // If object is not an array or is observed, bail out to regular call.
+  // If the object is not an array, or is observed or not extensible (sealed
+  // or frozen), bail out to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -1895,10 +1878,12 @@
     Handle<JSFunction> function,
     Handle<String> name,
     Code::StubType type) {
-  // If object is not an array or is observed, bail out to regular call.
+  // If the object is not an array, or is observed or not extensible (sealed
+  // or frozen), bail out to the regular call.
   if (!object->IsJSArray() ||
       !cell.is_null() ||
-      Handle<JSArray>::cast(object)->map()->is_observed()) {
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
     return Handle<Code>::null();
   }
 
@@ -2014,8 +1999,7 @@
   __ bind(&miss);
   // Restore function name in rcx.
   __ Move(rcx, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2080,8 +2064,7 @@
   __ bind(&miss);
   // Restore function name in rcx.
   __ Move(rcx, name);
-  __ bind(&name_miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&name_miss);
 
   // Return the generated code.
   return GetCode(type, name);
@@ -2127,15 +2110,10 @@
   StubRuntimeCallHelper call_helper;
   generator.GenerateSlow(masm(), call_helper);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2238,12 +2216,10 @@
   __ movq(rax, args.GetArgumentOperand(1));
   __ ret(2 * kPointerSize);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2324,15 +2300,10 @@
   __ MoveDouble(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
   __ ret(2 * kPointerSize);
 
-  // Tail call the full function. We do not have to patch the receiver
-  // because the function makes no use of it.
   __ bind(&slow);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
+  // We do not have to patch the receiver because the function makes no use of
+  // it.
+  GenerateJumpFunctionIgnoreReceiver(function);
 
   HandlerFrontendFooter(&miss);
 
@@ -2390,8 +2361,7 @@
   __ bind(&miss);
   __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
 
-  __ bind(&miss_before_stack_reserved);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss_before_stack_reserved);
 
   // Return the generated code.
   return GetCode(function);
@@ -2499,39 +2469,18 @@
 }
 
 
-void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  ParameterCount expected(function);
-  __ InvokeFunction(function, expected, arguments(),
-                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
-}
+void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
+                                            Register function,
+                                            Label* miss) {
+  // Check that the function really is a function.
+  GenerateFunctionCheck(function, rbx, miss);
 
-
-Handle<Code> CallStubCompiler::CompileCallConstant(
-    Handle<Object> object,
-    Handle<JSObject> holder,
-    Handle<Name> name,
-    CheckType check,
-    Handle<JSFunction> function) {
-  if (HasCustomCallGenerator(function)) {
-    Handle<Code> code = CompileCustomCall(object, holder,
-                                          Handle<PropertyCell>::null(),
-                                          function, Handle<String>::cast(name),
-                                          Code::FAST);
-    // A null handle means bail out to the regular compiler code below.
-    if (!code.is_null()) return code;
-  }
-
-  Label miss;
-  HandlerFrontendHeader(object, holder, name, check, &miss);
+  if (!function.is(rdi)) __ movq(rdi, function);
   PatchGlobalProxy(object);
-  CompileHandlerBackend(function);
-  HandlerFrontendFooter(&miss);
 
-  // Return the generated code.
-  return GetCode(function);
+  // Invoke the function.
+  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
+                    NullCallWrapper(), call_kind());
 }
 
 
@@ -2555,29 +2504,9 @@
   // Restore receiver.
   __ movq(rdx, args.GetReceiverOperand());
 
-  // Check that the function really is a function.
-  __ JumpIfSmi(rax, &miss);
-  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
-  __ j(not_equal, &miss);
+  GenerateJumpFunction(object, rax, &miss);
 
-  // Patch the receiver on the stack with the global proxy if
-  // necessary.
-  if (object->IsGlobalObject()) {
-    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
-    __ movq(args.GetReceiverOperand(), rdx);
-  }
-
-  // Invoke the function.
-  __ movq(rdi, rax);
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
-                    NullCallWrapper(), call_kind);
-
-  // Handle load cache miss.
-  __ bind(&miss);
-  GenerateMissBranch();
+  HandlerFrontendFooter(&miss);
 
   // Return the generated code.
   return GetCode(Code::FAST, name);
@@ -2600,26 +2529,12 @@
 
   Label miss;
   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+  // Potentially loads a closure that matches the shared function info of the
+  // function, rather than the function itself.
   GenerateLoadFunctionFromCell(cell, function, &miss);
-  PatchGlobalProxy(object);
-
-  // Set up the context (function already in rdi).
-  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
-  // Jump to the cached code (tail call).
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_global_inline(), 1);
-  ParameterCount expected(function->shared()->formal_parameter_count());
-  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
-      ? CALL_AS_FUNCTION
-      : CALL_AS_METHOD;
-  // We call indirectly through the code field in the function to
-  // allow recompilation to take effect without changing any of the
-  // call sites.
-  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
-  __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
-                NullCallWrapper(), call_kind);
-
+  GenerateJumpFunction(object, rdi, function);
   HandlerFrontendFooter(&miss);
 
   // Return the generated code.
diff --git a/test/cctest/test-code-stubs-arm.cc b/test/cctest/test-code-stubs-arm.cc
index 54eaa58..53cdd16 100644
--- a/test/cctest/test-code-stubs-arm.cc
+++ b/test/cctest/test-code-stubs-arm.cc
@@ -53,7 +53,6 @@
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
-  masm.set_allow_stub_calls(false);
   DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
 
   byte* start = stub.GetCode(isolate)->instruction_start();
diff --git a/test/cctest/test-code-stubs-ia32.cc b/test/cctest/test-code-stubs-ia32.cc
index 34db99f..c206a01 100644
--- a/test/cctest/test-code-stubs-ia32.cc
+++ b/test/cctest/test-code-stubs-ia32.cc
@@ -53,7 +53,6 @@
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
-  assm.set_allow_stub_calls(false);
   int offset =
     source_reg.is(esp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
   DoubleToIStub stub(source_reg, destination_reg, offset, true);
diff --git a/test/cctest/test-code-stubs-x64.cc b/test/cctest/test-code-stubs-x64.cc
index 4f89a63..6b3a12c 100644
--- a/test/cctest/test-code-stubs-x64.cc
+++ b/test/cctest/test-code-stubs-x64.cc
@@ -52,7 +52,6 @@
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
-  assm.set_allow_stub_calls(false);
   int offset =
     source_reg.is(rsp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
   DoubleToIStub stub(source_reg, destination_reg, offset, true);
diff --git a/test/cctest/test-macro-assembler-arm.cc b/test/cctest/test-macro-assembler-arm.cc
index 377450a..d40b8a5 100644
--- a/test/cctest/test-macro-assembler-arm.cc
+++ b/test/cctest/test-macro-assembler-arm.cc
@@ -148,7 +148,6 @@
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   __ sub(sp, sp, Operand(1 * kPointerSize));
   Label exit;
 
diff --git a/test/cctest/test-macro-assembler-ia32.cc b/test/cctest/test-macro-assembler-ia32.cc
index 4c817dc..38c738f 100644
--- a/test/cctest/test-macro-assembler-ia32.cc
+++ b/test/cctest/test-macro-assembler-ia32.cc
@@ -62,7 +62,6 @@
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   __ push(ebx);
   __ push(edx);
   __ sub(esp, Immediate(1 * kPointerSize));
diff --git a/test/cctest/test-macro-assembler-x64.cc b/test/cctest/test-macro-assembler-x64.cc
index a85d8e0..3f25162 100644
--- a/test/cctest/test-macro-assembler-x64.cc
+++ b/test/cctest/test-macro-assembler-x64.cc
@@ -165,7 +165,6 @@
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -255,7 +254,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -306,7 +304,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -435,7 +432,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -480,7 +476,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
   Condition cond;
@@ -729,7 +724,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -937,7 +931,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -946,7 +940,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1159,7 +1152,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1168,7 +1161,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1259,7 +1251,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1366,7 +1357,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1477,7 +1467,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1566,7 +1555,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1575,7 +1564,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1636,7 +1624,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1645,7 +1633,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);  // Avoid inline checks.
   EntryCode(masm);
   Label exit;
 
@@ -1716,7 +1703,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1725,7 +1712,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1798,7 +1784,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1807,7 +1793,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1882,7 +1867,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1891,7 +1876,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1959,7 +1943,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2047,7 +2030,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 7,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2056,7 +2039,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2154,7 +2136,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 5,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2163,7 +2145,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2224,7 +2205,7 @@
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2233,7 +2214,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2298,7 +2278,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2342,7 +2321,6 @@
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   Label exit;
 
   EntryCode(masm);
@@ -2695,7 +2673,6 @@
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   __ subq(rsp, Immediate(1 * kPointerSize));
   Label exit;
diff --git a/test/mjsunit/object-freeze.js b/test/mjsunit/object-freeze.js
index a0717a1..3b79874 100644
--- a/test/mjsunit/object-freeze.js
+++ b/test/mjsunit/object-freeze.js
@@ -314,3 +314,26 @@
 Object.freeze(obj);
 assertTrue(%HasFastProperties(obj));
 assertTrue(Object.isFrozen(obj));
+
+// Test array built-in functions with freeze.
+obj = [1,2,3];
+Object.freeze(obj);
+// Since frozen implies sealed (asserted below), the tests in object-seal.js
+// are mostly sufficient.
+assertTrue(Object.isSealed(obj));
+
+assertDoesNotThrow(function() { obj.push(); });
+assertDoesNotThrow(function() { obj.unshift(); });
+assertDoesNotThrow(function() { obj.splice(0,0); });
+assertTrue(Object.isFrozen(obj));
+
+// Verify that an item can't be changed with splice.
+assertThrows(function() { obj.splice(0,1,1); }, TypeError);
+
+// Verify that unshift() with no arguments fails when it would reify an
+// inherited element from the prototype into the frozen object.
+obj = [1,,3];
+obj.__proto__[1] = 1;
+assertEquals(1, obj[1]);
+Object.freeze(obj);
+assertThrows(function() { obj.unshift(); }, TypeError);
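
The last assertion covers a subtle case. As a standalone sketch of what
"reify from the prototype" means here (illustrative only):

    var a = [1, , 3];        // Hole at index 1: reads fall through to the
    Array.prototype[1] = 1;  // prototype chain.
    a.hasOwnProperty(1);     // false: index 1 is a hole, not an own property.
    a[1];                    // 1, inherited from Array.prototype.
    Object.freeze(a);
    // unshift() copies the elements; copying index 1 reads the inherited value
    // and must create a new own property on the frozen array, hence TypeError.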
diff --git a/test/mjsunit/object-seal.js b/test/mjsunit/object-seal.js
index f21baed..f31f0b7 100644
--- a/test/mjsunit/object-seal.js
+++ b/test/mjsunit/object-seal.js
@@ -28,6 +28,7 @@
 // Tests the Object.seal and Object.isSealed methods - ES 15.2.3.9 and
 // ES 15.2.3.12
 
+// Flags: --allow-natives-syntax --noalways-opt
 
 // Test that we throw an error if an object is not passed as argument.
 var non_objects = new Array(undefined, null, 1, -1, 0, 42.43);
@@ -192,3 +193,78 @@
 // Make sure that Object.seal returns the sealed object.
 var obj4 = {};
 assertTrue(obj4 === Object.seal(obj4));
+
+//
+// Test that built-in array functions can't modify a sealed array.
+//
+obj = [1, 2, 3];
+var objControl = [4, 5, 6];
+
+// Allow these functions to set up monomorphic call sites that use the custom
+// array built-ins.
+var push_call = function(a) { a.push(10); return a; }
+var pop_call = function(a) { return a.pop(); }
+for (var i = 0; i < 3; i++) {
+  push_call(obj);
+  pop_call(obj);
+}
+
+Object.seal(obj);
+assertThrows(function() { push_call(obj); }, TypeError);
+assertThrows(function() { pop_call(obj); }, TypeError);
+
+// But the control object is fine at these sites.
+assertDoesNotThrow(function() { push_call(objControl); });
+assertDoesNotThrow(function() { pop_call(objControl); });
+
+assertDoesNotThrow(function() { obj.push(); });
+assertThrows(function() { obj.push(3); }, TypeError);
+assertThrows(function() { obj.pop(); }, TypeError);
+assertThrows(function() { obj.shift(3); }, TypeError);
+assertDoesNotThrow(function() { obj.unshift(); });
+assertThrows(function() { obj.unshift(1); }, TypeError);
+assertThrows(function() { obj.splice(0, 0, 100, 101, 102); }, TypeError);
+assertDoesNotThrow(function() { obj.splice(0,0); });
+
+assertDoesNotThrow(function() { objControl.push(3); });
+assertDoesNotThrow(function() { objControl.pop(); });
+assertDoesNotThrow(function() { objControl.shift(3); });
+assertDoesNotThrow(function() { objControl.unshift(); });
+assertDoesNotThrow(function() { objControl.splice(0, 0, 100, 101, 102); });
+
+// Verify that Crankshaft still does the right thing.
+obj = [1, 2, 3];
+
+push_call = function(a) { a.push(1000); return a; }
+// Include a call site that doesn't have a custom built-in.
+var shift_call = function(a) { a.shift(1000); return a; }
+for (var i = 0; i < 3; i++) {
+  push_call(obj);
+  shift_call(obj);
+}
+
+%OptimizeFunctionOnNextCall(push_call);
+%OptimizeFunctionOnNextCall(shift_call);
+push_call(obj);
+shift_call(obj);
+assertOptimized(push_call);
+assertOptimized(shift_call);
+Object.seal(obj);
+assertThrows(function() { push_call(obj); }, TypeError);
+assertThrows(function() { shift_call(obj); }, TypeError);
+assertOptimized(push_call);
+// shift() doesn't have a custom call generator, so deopt will occur.
+assertUnoptimized(shift_call);
+assertDoesNotThrow(function() { push_call(objControl); });
+assertDoesNotThrow(function() { shift_call(objControl); });
+
+// Verify special behavior of splice on sealed objects.
+obj = [1,2,3];
+Object.seal(obj);
+assertDoesNotThrow(function() { obj.splice(0,1,100); });
+assertEquals(100, obj[0]);
+assertDoesNotThrow(function() { obj.splice(0,2,1,2); });
+assertDoesNotThrow(function() { obj.splice(1,2,1,2); });
+// Count of items to delete is clamped by length.
+assertDoesNotThrow(function() { obj.splice(1,2000,1,2); });
+assertThrows(function() { obj.splice(0,0,1); }, TypeError);
+assertThrows(function() { obj.splice(1,2000,1,2,3); }, TypeError);
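
The rule these assertions pin down, restated as a sketch (illustrative only):
on a sealed array, splice may overwrite existing elements in place but may not
change the length, and the delete count is clamped to the number of elements
between the start index and the end before the length check happens.

    var s = Object.seal([1, 2, 3]);
    s.splice(0, 1, 100);      // OK: one element out, one in; length stays 3.
    s.splice(1, 2000, 1, 2);  // OK: 2000 clamps to 2, and two items go back in.
    // s.splice(0, 0, 1) and s.splice(1, 2000, 1, 2, 3) would both grow the
    // array, so both throw TypeError.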
diff --git a/test/mjsunit/regress/regress-2711.js b/test/mjsunit/regress/regress-2711.js
index a58e789..d5ac2ba 100644
--- a/test/mjsunit/regress/regress-2711.js
+++ b/test/mjsunit/regress/regress-2711.js
@@ -27,7 +27,7 @@
 
 // Test that frozen arrays don't let their length change
 var a = Object.freeze([1]);
-a.push(2);
+assertThrows(function() { a.push(2); }, TypeError);
 assertEquals(1, a.length);
-a.push(2);
+assertThrows(function() { a.push(2); }, TypeError);
 assertEquals(1, a.length);
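
The updated expectations reflect that Array.prototype.push now throws even in
sloppy mode, because the built-in performs its writes as if the throw flag were
set; a plain assignment, by contrast, still fails silently there. A minimal
sketch (illustrative only):

    var a = Object.freeze([1]);
    a[1] = 2;                         // Silently ignored in sloppy mode.
    try { a.push(2); } catch (e) { }  // TypeError from the built-in.
    // a.length is still 1 after both attempts.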
diff --git a/test/mjsunit/regress/regress-299979.js b/test/mjsunit/regress/regress-299979.js
new file mode 100644
index 0000000..0afbcb3
--- /dev/null
+++ b/test/mjsunit/regress/regress-299979.js
@@ -0,0 +1,34 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+(function(){
+  "use strict";
+  var list = Object.freeze([1, 2, 3]);
+  assertThrows(function() { list.unshift(4); }, TypeError);
+  assertThrows(function() { list.shift(); }, TypeError);
+})();
diff --git a/tools/push-to-trunk/auto_roll.py b/tools/push-to-trunk/auto_roll.py
index cb990cd..895ae54 100755
--- a/tools/push-to-trunk/auto_roll.py
+++ b/tools/push-to-trunk/auto_roll.py
@@ -62,7 +62,8 @@
 
   def RunStep(self):
     lkgr_url = "https://v8-status.appspot.com/lkgr"
-    self.Persist("lkgr", self.ReadURL(lkgr_url))
+    # Retry several times since App Engine might have issues.
+    self.Persist("lkgr", self.ReadURL(lkgr_url, wait_plan=[5, 20, 300, 300]))
 
 
 class PushToTrunk(Step):
diff --git a/tools/push-to-trunk/common_includes.py b/tools/push-to-trunk/common_includes.py
index 4f77c6b..1965937 100644
--- a/tools/push-to-trunk/common_includes.py
+++ b/tools/push-to-trunk/common_includes.py
@@ -31,6 +31,7 @@
 import subprocess
 import sys
 import textwrap
+import time
 import urllib2
 
 PERSISTFILE_BASENAME = "PERSISTFILE_BASENAME"
@@ -173,6 +174,7 @@
 # Some commands don't like the pipe, e.g. calling vi from within the script or
 # from subscripts like git cl upload.
 def Command(cmd, args="", prefix="", pipe=True):
+  # TODO(machenbach): Use timeout.
   cmd_line = "%s %s %s" % (prefix, cmd, args)
   print "Command: %s" % cmd_line
   try:
@@ -200,6 +202,9 @@
     finally:
       url_fh.close()
 
+  def Sleep(self, seconds):
+    time.sleep(seconds)
+
 DEFAULT_SIDE_EFFECT_HANDLER = SideEffectHandler()
 
 
@@ -231,6 +236,35 @@
   def RunStep(self):
     raise NotImplementedError
 
+  def Retry(self, cb, retry_on=None, wait_plan=None):
+    """ Retry a function.
+    Params:
+      cb: The function to retry.
+      retry_on: A callback that takes the result of the function and returns
+                True if the function should be retried. A function throwing an
+                exception is always retried.
+      wait_plan: A list of waiting delays between retries in seconds. The
+                 maximum number of retries is len(wait_plan).
+    """
+    retry_on = retry_on or (lambda x: False)
+    wait_plan = list(wait_plan or [])
+    wait_plan.reverse()
+    while True:
+      got_exception = False
+      try:
+        result = cb()
+      except Exception:
+        got_exception = True
+      if got_exception or retry_on(result):
+        if not wait_plan:
+          raise Exception("Retried too often. Giving up.")
+        wait_time = wait_plan.pop()
+        print "Waiting for %f seconds." % wait_time
+        self._side_effect_handler.Sleep(wait_time)
+        print "Retrying..."
+      else:
+        return result
+
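
For clarity, the wait_plan contract implemented by Retry above, restated as a
minimal JavaScript sketch (the names and the injected sleep callback are
illustrative; the patch itself is Python): at most wait_plan.length retries,
with the delays consumed in order, and an exception from the callback always
counting as a failed attempt.

    function retry(cb, retryOn, waitPlan, sleep) {
      retryOn = retryOn || function () { return false; };
      var plan = waitPlan.slice();  // e.g. [5, 20, 300, 300] as in auto_roll
      for (;;) {
        var failed = false, result;
        try { result = cb(); } catch (e) { failed = true; }
        if (!failed && !retryOn(result)) return result;
        if (plan.length === 0) throw new Error("Retried too often. Giving up.");
        sleep(plan.shift());  // Wait, then try again.
      }
    }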
   def ReadLine(self, default=None):
     # Don't prompt in forced mode.
     if self._options and self._options.f and default is not None:
@@ -239,15 +273,18 @@
     else:
       return self._side_effect_handler.ReadLine()
 
-  def Git(self, args="", prefix="", pipe=True):
-    return self._side_effect_handler.Command("git", args, prefix, pipe)
+  def Git(self, args="", prefix="", pipe=True, retry_on=None):
+    cmd = lambda: self._side_effect_handler.Command("git", args, prefix, pipe)
+    return self.Retry(cmd, retry_on, [5, 30])
 
   def Editor(self, args):
     return self._side_effect_handler.Command(os.environ["EDITOR"], args,
                                              pipe=False)
 
-  def ReadURL(self, url):
-    return self._side_effect_handler.ReadURL(url)
+  def ReadURL(self, url, retry_on=None, wait_plan=None):
+    wait_plan = wait_plan or [3, 60, 600]
+    cmd = lambda: self._side_effect_handler.ReadURL(url)
+    return self.Retry(cmd, retry_on, wait_plan)
 
   def Die(self, msg=""):
     if msg != "":
diff --git a/tools/push-to-trunk/push_to_trunk.py b/tools/push-to-trunk/push_to_trunk.py
index d78aacb..cee871f 100755
--- a/tools/push-to-trunk/push_to_trunk.py
+++ b/tools/push-to-trunk/push_to_trunk.py
@@ -102,7 +102,9 @@
     if match:
       cl_url = "https://codereview.chromium.org/%s/description" % match.group(1)
       try:
-        body = self.ReadURL(cl_url)
+        # Fetch from Rietveld, but retry only once with a one-second delay,
+        # since there might be many revisions.
+        body = self.ReadURL(cl_url, wait_plan=[1])
       except urllib2.URLError:
         pass
     return body
diff --git a/tools/push-to-trunk/test_scripts.py b/tools/push-to-trunk/test_scripts.py
index f4d0c12..acb28a0 100644
--- a/tools/push-to-trunk/test_scripts.py
+++ b/tools/push-to-trunk/test_scripts.py
@@ -216,7 +216,12 @@
     # callback for checking the context at the time of the call.
     if len(expected_call) == len(args) + 2:
       expected_call[len(args) + 1]()
-    return expected_call[len(args)]
+    return_value = expected_call[len(args)]
+
+    # If the return value is an exception, raise it instead of returning.
+    if isinstance(return_value, Exception):
+      raise return_value
+    return return_value
 
   def AssertFinished(self):
     if self._index < len(self._recipe) -1:
@@ -269,6 +274,9 @@
   def ReadURL(self, url):
     return self._url_mock.Call(url)
 
+  def Sleep(self, seconds):
+    pass
+
   def ExpectGit(self, *args):
     """Convenience wrapper."""
     self._git_mock.Expect(*args)
@@ -674,6 +682,7 @@
     os.environ["EDITOR"] = "vi"
 
     self.ExpectReadURL([
+      ["https://v8-status.appspot.com/lkgr", Exception("Network problem")],
       ["https://v8-status.appspot.com/lkgr", "100"],
     ])