// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "hydrogen.h"
#include <algorithm>
#include "v8.h"
#include "allocation-site-scopes.h"
#include "codegen.h"
#include "full-codegen.h"
#include "hashmap.h"
#include "hydrogen-bce.h"
#include "hydrogen-bch.h"
#include "hydrogen-canonicalize.h"
#include "hydrogen-check-elimination.h"
#include "hydrogen-dce.h"
#include "hydrogen-dehoist.h"
#include "hydrogen-environment-liveness.h"
#include "hydrogen-escape-analysis.h"
#include "hydrogen-infer-representation.h"
#include "hydrogen-infer-types.h"
#include "hydrogen-load-elimination.h"
#include "hydrogen-gvn.h"
#include "hydrogen-mark-deoptimize.h"
#include "hydrogen-mark-unreachable.h"
#include "hydrogen-minus-zero.h"
#include "hydrogen-osr.h"
#include "hydrogen-range-analysis.h"
#include "hydrogen-redundant-phi.h"
#include "hydrogen-removable-simulates.h"
#include "hydrogen-representation-changes.h"
#include "hydrogen-sce.h"
#include "hydrogen-uint32-analysis.h"
#include "lithium-allocator.h"
#include "parser.h"
#include "runtime.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "stub-cache.h"
#include "typing.h"
#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-codegen-mips.h"
#else
#error Unsupported target architecture.
#endif
namespace v8 {
namespace internal {
HBasicBlock::HBasicBlock(HGraph* graph)
: block_id_(graph->GetNextBlockID()),
graph_(graph),
phis_(4, graph->zone()),
first_(NULL),
last_(NULL),
end_(NULL),
loop_information_(NULL),
predecessors_(2, graph->zone()),
dominator_(NULL),
dominated_blocks_(4, graph->zone()),
last_environment_(NULL),
argument_count_(-1),
first_instruction_index_(-1),
last_instruction_index_(-1),
deleted_phis_(4, graph->zone()),
parent_loop_header_(NULL),
inlined_entry_block_(NULL),
is_inline_return_target_(false),
is_reachable_(true),
dominates_loop_successors_(false),
is_osr_entry_(false) { }
Isolate* HBasicBlock::isolate() const {
return graph_->isolate();
}
void HBasicBlock::MarkUnreachable() {
is_reachable_ = false;
}
void HBasicBlock::AttachLoopInformation() {
ASSERT(!IsLoopHeader());
loop_information_ = new(zone()) HLoopInformation(this, zone());
}
void HBasicBlock::DetachLoopInformation() {
ASSERT(IsLoopHeader());
loop_information_ = NULL;
}
void HBasicBlock::AddPhi(HPhi* phi) {
ASSERT(!IsStartBlock());
phis_.Add(phi, zone());
phi->SetBlock(this);
}
void HBasicBlock::RemovePhi(HPhi* phi) {
ASSERT(phi->block() == this);
ASSERT(phis_.Contains(phi));
phi->Kill();
phis_.RemoveElement(phi);
phi->SetBlock(NULL);
}
void HBasicBlock::AddInstruction(HInstruction* instr, int position) {
ASSERT(!IsStartBlock() || !IsFinished());
ASSERT(!instr->IsLinked());
ASSERT(!IsFinished());
if (position != RelocInfo::kNoPosition) {
instr->set_position(position);
}
if (first_ == NULL) {
ASSERT(last_environment() != NULL);
ASSERT(!last_environment()->ast_id().IsNone());
HBlockEntry* entry = new(zone()) HBlockEntry();
entry->InitializeAsFirst(this);
if (position != RelocInfo::kNoPosition) {
entry->set_position(position);
} else {
ASSERT(!FLAG_emit_opt_code_positions ||
!graph()->info()->IsOptimizing());
}
first_ = last_ = entry;
}
instr->InsertAfter(last_);
}
HPhi* HBasicBlock::AddNewPhi(int merged_index) {
if (graph()->IsInsideNoSideEffectsScope()) {
merged_index = HPhi::kInvalidMergedIndex;
}
HPhi* phi = new(zone()) HPhi(merged_index, zone());
AddPhi(phi);
return phi;
}
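// Emits an HSimulate capturing the current environment state (values pushed
// onto the expression stack and variables assigned since the last simulate),
// keyed by |ast_id| so the deoptimizer can rebuild the unoptimized frame at
// that point if optimized code later bails out.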
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
RemovableSimulate removable) {
ASSERT(HasEnvironment());
HEnvironment* environment = last_environment();
ASSERT(ast_id.IsNone() ||
ast_id == BailoutId::StubEntry() ||
environment->closure()->shared()->VerifyBailoutId(ast_id));
int push_count = environment->push_count();
int pop_count = environment->pop_count();
HSimulate* instr =
new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
instr->set_closure(environment->closure());
#endif
// Order of pushed values: newest (top of stack) first. This allows
// HSimulate::MergeWith() to easily append additional pushed values
// that are older (from further down the stack).
for (int i = 0; i < push_count; ++i) {
instr->AddPushedValue(environment->ExpressionStackAt(i));
}
for (GrowableBitVector::Iterator it(environment->assigned_variables(),
zone());
!it.Done();
it.Advance()) {
int index = it.Current();
instr->AddAssignedValue(index, environment->Lookup(index));
}
environment->ClearHistory();
return instr;
}
void HBasicBlock::Finish(HControlInstruction* end, int position) {
ASSERT(!IsFinished());
AddInstruction(end, position);
end_ = end;
for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
it.Current()->RegisterPredecessor(this);
}
}
void HBasicBlock::Goto(HBasicBlock* block,
int position,
FunctionState* state,
bool add_simulate) {
bool drop_extra = state != NULL &&
state->inlining_kind() == DROP_EXTRA_ON_RETURN;
if (block->IsInlineReturnTarget()) {
HEnvironment* env = last_environment();
int argument_count = env->arguments_environment()->parameter_count();
AddInstruction(new(zone())
HLeaveInlined(state->entry(), argument_count),
position);
UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
}
if (add_simulate) AddNewSimulate(BailoutId::None(), position);
HGoto* instr = new(zone()) HGoto(block);
Finish(instr, position);
}
void HBasicBlock::AddLeaveInlined(HValue* return_value,
FunctionState* state,
int position) {
HBasicBlock* target = state->function_return();
bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;
ASSERT(target->IsInlineReturnTarget());
ASSERT(return_value != NULL);
HEnvironment* env = last_environment();
int argument_count = env->arguments_environment()->parameter_count();
AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
position);
UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
last_environment()->Push(return_value);
AddNewSimulate(BailoutId::None(), position);
HGoto* instr = new(zone()) HGoto(target);
Finish(instr, position);
}
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
ASSERT(!HasEnvironment());
ASSERT(first() == NULL);
UpdateEnvironment(env);
}
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
last_environment_ = env;
graph()->update_maximum_environment_size(env->first_expression_index());
}
void HBasicBlock::SetJoinId(BailoutId ast_id) {
int length = predecessors_.length();
ASSERT(length > 0);
for (int i = 0; i < length; i++) {
HBasicBlock* predecessor = predecessors_[i];
ASSERT(predecessor->end()->IsGoto());
HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
ASSERT(i != 0 ||
(predecessor->last_environment()->closure().is_null() ||
predecessor->last_environment()->closure()->shared()
->VerifyBailoutId(ast_id)));
simulate->set_ast_id(ast_id);
predecessor->last_environment()->set_ast_id(ast_id);
}
}
bool HBasicBlock::Dominates(HBasicBlock* other) const {
HBasicBlock* current = other->dominator();
while (current != NULL) {
if (current == this) return true;
current = current->dominator();
}
return false;
}
int HBasicBlock::LoopNestingDepth() const {
const HBasicBlock* current = this;
int result = (current->IsLoopHeader()) ? 1 : 0;
while (current->parent_loop_header() != NULL) {
current = current->parent_loop_header();
result++;
}
return result;
}
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
ASSERT(IsLoopHeader());
SetJoinId(stmt->EntryId());
if (predecessors()->length() == 1) {
// This is a degenerate loop.
DetachLoopInformation();
return;
}
// Only the first entry into the loop is from outside the loop. All other
// entries must be back edges.
for (int i = 1; i < predecessors()->length(); ++i) {
loop_information()->RegisterBackEdge(predecessors()->at(i));
}
}
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
if (HasPredecessor()) {
// Only loop header blocks can have a predecessor added after
// instructions have been added to the block (they have phis for all
// values in the environment; these phis may be eliminated later).
ASSERT(IsLoopHeader() || first_ == NULL);
HEnvironment* incoming_env = pred->last_environment();
if (IsLoopHeader()) {
ASSERT(phis()->length() == incoming_env->length());
for (int i = 0; i < phis_.length(); ++i) {
phis_[i]->AddInput(incoming_env->values()->at(i));
}
} else {
last_environment()->AddIncomingEdge(this, pred->last_environment());
}
} else if (!HasEnvironment() && !IsFinished()) {
ASSERT(!IsLoopHeader());
SetInitialEnvironment(pred->last_environment()->Copy());
}
predecessors_.Add(pred, zone());
}
void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
ASSERT(!dominated_blocks_.Contains(block));
// Keep the list of dominated blocks sorted so that if there are two
// successive blocks in this list, the predecessor comes before the successor.
int index = 0;
while (index < dominated_blocks_.length() &&
dominated_blocks_[index]->block_id() < block->block_id()) {
++index;
}
dominated_blocks_.InsertAt(index, block, zone());
}
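// Updates this block's dominator to the common dominator of its current
// dominator and |other|. Since a block's dominator always has a smaller
// block id, the two dominator chains can be intersected by repeatedly
// stepping whichever chain currently points at the higher-numbered block
// until the chains meet.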
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
if (dominator_ == NULL) {
dominator_ = other;
other->AddDominatedBlock(this);
} else if (other->dominator() != NULL) {
HBasicBlock* first = dominator_;
HBasicBlock* second = other;
while (first != second) {
if (first->block_id() > second->block_id()) {
first = first->dominator();
} else {
second = second->dominator();
}
ASSERT(first != NULL && second != NULL);
}
if (dominator_ != first) {
ASSERT(dominator_->dominated_blocks_.Contains(this));
dominator_->dominated_blocks_.RemoveElement(this);
dominator_ = first;
first->AddDominatedBlock(this);
}
}
}
void HBasicBlock::AssignLoopSuccessorDominators() {
// Mark blocks that dominate all subsequent reachable blocks inside their
// loop. Exploit the fact that blocks are sorted in reverse post order. When
// the loop is visited in increasing block id order, if the number of
// non-loop-exiting successor edges at the dominator_candidate block doesn't
// exceed the number of previously encountered predecessor edges, there is no
// path from the loop header to any block with higher id that doesn't go
// through the dominator_candidate block. In this case, the
// dominator_candidate block is guaranteed to dominate all blocks reachable
// from it with higher ids.
HBasicBlock* last = loop_information()->GetLastBackEdge();
int outstanding_successors = 1; // one edge from the pre-header
// Header always dominates everything.
MarkAsLoopSuccessorDominator();
for (int j = block_id(); j <= last->block_id(); ++j) {
HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
for (HPredecessorIterator it(dominator_candidate); !it.Done();
it.Advance()) {
HBasicBlock* predecessor = it.Current();
// Don't count back edges.
if (predecessor->block_id() < dominator_candidate->block_id()) {
outstanding_successors--;
}
}
// If more successors than predecessors have been seen in the loop up to
// now, it's not possible to guarantee that the current block dominates
// all of the blocks with higher IDs. In this case, assume conservatively
// that paths through the loop that don't go through the current block
// contain all of the loop's dependencies. Also be careful to record
// dominator information about the current loop that's being processed,
// and not nested loops, which will be processed when
// AssignLoopSuccessorDominators gets called on their header.
ASSERT(outstanding_successors >= 0);
HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
if (outstanding_successors == 0 &&
(parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
dominator_candidate->MarkAsLoopSuccessorDominator();
}
HControlInstruction* end = dominator_candidate->end();
for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
HBasicBlock* successor = it.Current();
// Only count successors that remain inside the loop and don't loop back
// to a loop header.
if (successor->block_id() > dominator_candidate->block_id() &&
successor->block_id() <= last->block_id()) {
// Backwards edges must land on loop headers.
ASSERT(successor->block_id() > dominator_candidate->block_id() ||
successor->IsLoopHeader());
outstanding_successors++;
}
}
}
}
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
for (int i = 0; i < predecessors_.length(); ++i) {
if (predecessors_[i] == predecessor) return i;
}
UNREACHABLE();
return -1;
}
#ifdef DEBUG
void HBasicBlock::Verify() {
// Check that every block is finished.
ASSERT(IsFinished());
ASSERT(block_id() >= 0);
// Check that the incoming edges are in edge split form.
if (predecessors_.length() > 1) {
for (int i = 0; i < predecessors_.length(); ++i) {
ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
}
}
}
#endif
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
this->back_edges_.Add(block, block->zone());
AddBlock(block);
}
HBasicBlock* HLoopInformation::GetLastBackEdge() const {
int max_id = -1;
HBasicBlock* result = NULL;
for (int i = 0; i < back_edges_.length(); ++i) {
HBasicBlock* cur = back_edges_[i];
if (cur->block_id() > max_id) {
max_id = cur->block_id();
result = cur;
}
}
return result;
}
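// Registers |block| as a member of this loop. If the block already belongs
// to an inner loop, the inner loop's header is registered instead, so
// membership propagates outward one nesting level at a time; otherwise the
// block is adopted directly and its predecessors are added recursively.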
void HLoopInformation::AddBlock(HBasicBlock* block) {
if (block == loop_header()) return;
if (block->parent_loop_header() == loop_header()) return;
if (block->parent_loop_header() != NULL) {
AddBlock(block->parent_loop_header());
} else {
block->set_parent_loop_header(loop_header());
blocks_.Add(block, block->zone());
for (int i = 0; i < block->predecessors()->length(); ++i) {
AddBlock(block->predecessors()->at(i));
}
}
}
#ifdef DEBUG
// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it were not part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
public:
ReachabilityAnalyzer(HBasicBlock* entry_block,
int block_count,
HBasicBlock* dont_visit)
: visited_count_(0),
stack_(16, entry_block->zone()),
reachable_(block_count, entry_block->zone()),
dont_visit_(dont_visit) {
PushBlock(entry_block);
Analyze();
}
int visited_count() const { return visited_count_; }
const BitVector* reachable() const { return &reachable_; }
private:
void PushBlock(HBasicBlock* block) {
if (block != NULL && block != dont_visit_ &&
!reachable_.Contains(block->block_id())) {
reachable_.Add(block->block_id());
stack_.Add(block, block->zone());
visited_count_++;
}
}
void Analyze() {
while (!stack_.is_empty()) {
HControlInstruction* end = stack_.RemoveLast()->end();
for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
PushBlock(it.Current());
}
}
}
int visited_count_;
ZoneList<HBasicBlock*> stack_;
BitVector reachable_;
HBasicBlock* dont_visit_;
};
void HGraph::Verify(bool do_full_verify) const {
Heap::RelocationLock relocation_lock(isolate()->heap());
AllowHandleDereference allow_deref;
AllowDeferredHandleDereference allow_deferred_deref;
for (int i = 0; i < blocks_.length(); i++) {
HBasicBlock* block = blocks_.at(i);
block->Verify();
// Check that every block contains at least one node and that only the last
// node is a control instruction.
HInstruction* current = block->first();
ASSERT(current != NULL && current->IsBlockEntry());
while (current != NULL) {
ASSERT((current->next() == NULL) == current->IsControlInstruction());
ASSERT(current->block() == block);
current->Verify();
current = current->next();
}
// Check that successors are correctly set.
HBasicBlock* first = block->end()->FirstSuccessor();
HBasicBlock* second = block->end()->SecondSuccessor();
ASSERT(second == NULL || first != NULL);
// Check that the predecessor array is correct.
if (first != NULL) {
ASSERT(first->predecessors()->Contains(block));
if (second != NULL) {
ASSERT(second->predecessors()->Contains(block));
}
}
// Check that phis have correct arguments.
for (int j = 0; j < block->phis()->length(); j++) {
HPhi* phi = block->phis()->at(j);
phi->Verify();
}
// Check that all join blocks have predecessors that end with an
// unconditional goto and agree on their environment node id.
if (block->predecessors()->length() >= 2) {
BailoutId id =
block->predecessors()->first()->last_environment()->ast_id();
for (int k = 0; k < block->predecessors()->length(); k++) {
HBasicBlock* predecessor = block->predecessors()->at(k);
ASSERT(predecessor->end()->IsGoto() ||
predecessor->end()->IsDeoptimize());
ASSERT(predecessor->last_environment()->ast_id() == id);
}
}
}
// Check special property of first block to have no predecessors.
ASSERT(blocks_.at(0)->predecessors()->is_empty());
if (do_full_verify) {
// Check that the graph is fully connected.
ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
ASSERT(analyzer.visited_count() == blocks_.length());
// Check that entry block dominator is NULL.
ASSERT(entry_block_->dominator() == NULL);
// Check dominators.
for (int i = 0; i < blocks_.length(); ++i) {
HBasicBlock* block = blocks_.at(i);
if (block->dominator() == NULL) {
// Only the start block may have no dominator assigned.
ASSERT(i == 0);
} else {
// Assert that the block is unreachable if its dominator must not be visited.
ReachabilityAnalyzer dominator_analyzer(entry_block_,
blocks_.length(),
block->dominator());
ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
}
}
}
}
#endif
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
int32_t value) {
if (!pointer->is_set()) {
// Can't pass GetInvalidContext() to HConstant::New, because that will
// recursively call GetConstant.
HConstant* constant = HConstant::New(zone(), NULL, value);
constant->InsertAfter(entry_block()->first());
pointer->set(constant);
return constant;
}
return ReinsertConstantIfNecessary(pointer->get());
}
HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
if (!constant->IsLinked()) {
// The constant was removed from the graph. Reinsert.
constant->ClearFlag(HValue::kIsDead);
constant->InsertAfter(entry_block()->first());
}
return constant;
}
HConstant* HGraph::GetConstant0() {
return GetConstant(&constant_0_, 0);
}
HConstant* HGraph::GetConstant1() {
return GetConstant(&constant_1_, 1);
}
HConstant* HGraph::GetConstantMinus1() {
return GetConstant(&constant_minus1_, -1);
}
#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
HConstant* HGraph::GetConstant##Name() { \
if (!constant_##name##_.is_set()) { \
HConstant* constant = new(zone()) HConstant( \
Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
Representation::Tagged(), \
htype, \
false, \
true, \
false, \
boolean_value); \
constant->InsertAfter(entry_block()->first()); \
constant_##name##_.set(constant); \
} \
return ReinsertConstantIfNecessary(constant_##name##_.get()); \
}
DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)
#undef DEFINE_GET_CONSTANT
#define DEFINE_IS_CONSTANT(Name, name) \
bool HGraph::IsConstant##Name(HConstant* constant) { \
return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)
#undef DEFINE_IS_CONSTANT
HConstant* HGraph::GetInvalidContext() {
return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
bool HGraph::IsStandardConstant(HConstant* constant) {
if (IsConstantUndefined(constant)) return true;
if (IsConstant0(constant)) return true;
if (IsConstant1(constant)) return true;
if (IsConstantMinus1(constant)) return true;
if (IsConstantTrue(constant)) return true;
if (IsConstantFalse(constant)) return true;
if (IsConstantHole(constant)) return true;
if (IsConstantNull(constant)) return true;
return false;
}
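// IfBuilder emits the control flow for an if/then/else diamond during graph
// construction. A typical use, mirroring call sites later in this file
// (e.g. BuildCopyElementsOnWrite):
//
//   IfBuilder cow_checker(this);
//   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
//   cow_checker.Then();
//   ... emit code for the true branch ...
//   cow_checker.Else();
//   ... emit code for the false branch ...
//   cow_checker.End();
//
// Results computed in both branches are usually passed through the join via
// environment()->Push() and Pop().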
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
: builder_(builder),
finished_(false),
did_then_(false),
did_else_(false),
did_else_if_(false),
did_and_(false),
did_or_(false),
captured_(false),
needs_compare_(true),
pending_merge_block_(false),
split_edge_merge_block_(NULL),
merge_at_join_blocks_(NULL),
normal_merge_at_join_block_count_(0),
deopt_merge_at_join_block_count_(0) {
HEnvironment* env = builder->environment();
first_true_block_ = builder->CreateBasicBlock(env->Copy());
first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
HGraphBuilder::IfBuilder::IfBuilder(
HGraphBuilder* builder,
HIfContinuation* continuation)
: builder_(builder),
finished_(false),
did_then_(false),
did_else_(false),
did_else_if_(false),
did_and_(false),
did_or_(false),
captured_(false),
needs_compare_(false),
pending_merge_block_(false),
first_true_block_(NULL),
first_false_block_(NULL),
split_edge_merge_block_(NULL),
merge_at_join_blocks_(NULL),
normal_merge_at_join_block_count_(0),
deopt_merge_at_join_block_count_(0) {
continuation->Continue(&first_true_block_,
&first_false_block_);
}
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
HControlInstruction* compare) {
ASSERT(did_then_ == did_else_);
if (did_else_) {
// Handle if-then-elseif
did_else_if_ = true;
did_else_ = false;
did_then_ = false;
did_and_ = false;
did_or_ = false;
pending_merge_block_ = false;
split_edge_merge_block_ = NULL;
HEnvironment* env = builder_->environment();
first_true_block_ = builder_->CreateBasicBlock(env->Copy());
first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}
if (split_edge_merge_block_ != NULL) {
HEnvironment* env = first_false_block_->last_environment();
HBasicBlock* split_edge =
builder_->CreateBasicBlock(env->Copy());
if (did_or_) {
compare->SetSuccessorAt(0, split_edge);
compare->SetSuccessorAt(1, first_false_block_);
} else {
compare->SetSuccessorAt(0, first_true_block_);
compare->SetSuccessorAt(1, split_edge);
}
builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
} else {
compare->SetSuccessorAt(0, first_true_block_);
compare->SetSuccessorAt(1, first_false_block_);
}
builder_->FinishCurrentBlock(compare);
needs_compare_ = false;
return compare;
}
void HGraphBuilder::IfBuilder::Or() {
ASSERT(!needs_compare_);
ASSERT(!did_and_);
did_or_ = true;
HEnvironment* env = first_false_block_->last_environment();
if (split_edge_merge_block_ == NULL) {
split_edge_merge_block_ =
builder_->CreateBasicBlock(env->Copy());
builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
first_true_block_ = split_edge_merge_block_;
}
builder_->set_current_block(first_false_block_);
first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}
void HGraphBuilder::IfBuilder::And() {
ASSERT(!needs_compare_);
ASSERT(!did_or_);
did_and_ = true;
HEnvironment* env = first_false_block_->last_environment();
if (split_edge_merge_block_ == NULL) {
split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
first_false_block_ = split_edge_merge_block_;
}
builder_->set_current_block(first_true_block_);
first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}
void HGraphBuilder::IfBuilder::CaptureContinuation(
HIfContinuation* continuation) {
ASSERT(!did_else_if_);
ASSERT(!finished_);
ASSERT(!captured_);
HBasicBlock* true_block = NULL;
HBasicBlock* false_block = NULL;
Finish(&true_block, &false_block);
ASSERT(true_block != NULL);
ASSERT(false_block != NULL);
continuation->Capture(true_block, false_block);
captured_ = true;
builder_->set_current_block(NULL);
End();
}
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
ASSERT(!did_else_if_);
ASSERT(!finished_);
ASSERT(!captured_);
HBasicBlock* true_block = NULL;
HBasicBlock* false_block = NULL;
Finish(&true_block, &false_block);
merge_at_join_blocks_ = NULL;
if (true_block != NULL && !true_block->IsFinished()) {
ASSERT(continuation->IsTrueReachable());
builder_->GotoNoSimulate(true_block, continuation->true_branch());
}
if (false_block != NULL && !false_block->IsFinished()) {
ASSERT(continuation->IsFalseReachable());
builder_->GotoNoSimulate(false_block, continuation->false_branch());
}
captured_ = true;
End();
}
void HGraphBuilder::IfBuilder::Then() {
ASSERT(!captured_);
ASSERT(!finished_);
did_then_ = true;
if (needs_compare_) {
// Handle ifs without any expressions; they jump directly to the "else"
// branch. However, we must pretend that the "then" branch is reachable,
// so that the graph builder visits it and sees any live range extending
// constructs within it.
HConstant* constant_false = builder_->graph()->GetConstantFalse();
ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
boolean_type.Add(ToBooleanStub::BOOLEAN);
HBranch* branch = builder()->New<HBranch>(
constant_false, boolean_type, first_true_block_, first_false_block_);
builder_->FinishCurrentBlock(branch);
}
builder_->set_current_block(first_true_block_);
pending_merge_block_ = true;
}
void HGraphBuilder::IfBuilder::Else() {
ASSERT(did_then_);
ASSERT(!captured_);
ASSERT(!finished_);
AddMergeAtJoinBlock(false);
builder_->set_current_block(first_false_block_);
pending_merge_block_ = true;
did_else_ = true;
}
void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
ASSERT(did_then_);
builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
AddMergeAtJoinBlock(true);
}
void HGraphBuilder::IfBuilder::Return(HValue* value) {
HValue* parameter_count = builder_->graph()->GetConstantMinus1();
builder_->FinishExitCurrentBlock(
builder_->New<HReturn>(value, parameter_count));
AddMergeAtJoinBlock(false);
}
void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
if (!pending_merge_block_) return;
HBasicBlock* block = builder_->current_block();
ASSERT(block == NULL || !block->IsFinished());
MergeAtJoinBlock* record =
new(builder_->zone()) MergeAtJoinBlock(block, deopt,
merge_at_join_blocks_);
merge_at_join_blocks_ = record;
if (block != NULL) {
ASSERT(block->end() == NULL);
if (deopt) {
deopt_merge_at_join_block_count_++;
} else {
normal_merge_at_join_block_count_++;
}
}
builder_->set_current_block(NULL);
pending_merge_block_ = false;
}
void HGraphBuilder::IfBuilder::Finish() {
ASSERT(!finished_);
if (!did_then_) {
Then();
}
AddMergeAtJoinBlock(false);
if (!did_else_) {
Else();
AddMergeAtJoinBlock(false);
}
finished_ = true;
}
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
HBasicBlock** else_continuation) {
Finish();
MergeAtJoinBlock* else_record = merge_at_join_blocks_;
if (else_continuation != NULL) {
*else_continuation = else_record->block_;
}
MergeAtJoinBlock* then_record = else_record->next_;
if (then_continuation != NULL) {
*then_continuation = then_record->block_;
}
ASSERT(then_record->next_ == NULL);
}
void HGraphBuilder::IfBuilder::End() {
if (captured_) return;
Finish();
int total_merged_blocks = normal_merge_at_join_block_count_ +
deopt_merge_at_join_block_count_;
ASSERT(total_merged_blocks >= 1);
HBasicBlock* merge_block = total_merged_blocks == 1
? NULL : builder_->graph()->CreateBasicBlock();
// Merge non-deopt blocks first to ensure the environment has the right size
// for padding.
MergeAtJoinBlock* current = merge_at_join_blocks_;
while (current != NULL) {
if (!current->deopt_ && current->block_ != NULL) {
// If there is only one block that makes it through to the end of the
// if, then just set it as the current block and continue rather than
// creating an unnecessary merge block.
if (total_merged_blocks == 1) {
builder_->set_current_block(current->block_);
return;
}
builder_->GotoNoSimulate(current->block_, merge_block);
}
current = current->next_;
}
// Merge deopt blocks, padding when necessary.
current = merge_at_join_blocks_;
while (current != NULL) {
if (current->deopt_ && current->block_ != NULL) {
builder_->PadEnvironmentForContinuation(current->block_,
merge_block);
builder_->GotoNoSimulate(current->block_, merge_block);
}
current = current->next_;
}
builder_->set_current_block(merge_block);
}
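// LoopBuilder emits a counted loop. A typical use, mirroring
// BuildCopySeqStringChars later in this file:
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
//   ... emit the loop body, which may use |index| ...
//   loop.EndBody();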
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
HValue* context,
LoopBuilder::Direction direction)
: builder_(builder),
context_(context),
direction_(direction),
finished_(false) {
header_block_ = builder->CreateLoopHeaderBlock();
body_block_ = NULL;
exit_block_ = NULL;
exit_trampoline_block_ = NULL;
increment_amount_ = builder_->graph()->GetConstant1();
}
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
HValue* context,
LoopBuilder::Direction direction,
HValue* increment_amount)
: builder_(builder),
context_(context),
direction_(direction),
finished_(false) {
header_block_ = builder->CreateLoopHeaderBlock();
body_block_ = NULL;
exit_block_ = NULL;
exit_trampoline_block_ = NULL;
increment_amount_ = increment_amount;
}
HValue* HGraphBuilder::LoopBuilder::BeginBody(
HValue* initial,
HValue* terminating,
Token::Value token) {
HEnvironment* env = builder_->environment();
phi_ = header_block_->AddNewPhi(env->values()->length());
phi_->AddInput(initial);
env->Push(initial);
builder_->GotoNoSimulate(header_block_);
HEnvironment* body_env = env->Copy();
HEnvironment* exit_env = env->Copy();
// Remove the phi from the expression stack
body_env->Pop();
exit_env->Pop();
body_block_ = builder_->CreateBasicBlock(body_env);
exit_block_ = builder_->CreateBasicBlock(exit_env);
builder_->set_current_block(header_block_);
env->Pop();
builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
phi_, terminating, token, body_block_, exit_block_));
builder_->set_current_block(body_block_);
if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
HValue* one = builder_->graph()->GetConstant1();
if (direction_ == kPreIncrement) {
increment_ = HAdd::New(zone(), context_, phi_, one);
} else {
increment_ = HSub::New(zone(), context_, phi_, one);
}
increment_->ClearFlag(HValue::kCanOverflow);
builder_->AddInstruction(increment_);
return increment_;
} else {
return phi_;
}
}
void HGraphBuilder::LoopBuilder::Break() {
if (exit_trampoline_block_ == NULL) {
// It's the first time we've seen a break.
HEnvironment* env = exit_block_->last_environment()->Copy();
exit_trampoline_block_ = builder_->CreateBasicBlock(env);
builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
}
builder_->GotoNoSimulate(exit_trampoline_block_);
builder_->set_current_block(NULL);
}
void HGraphBuilder::LoopBuilder::EndBody() {
ASSERT(!finished_);
if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
if (direction_ == kPostIncrement) {
increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
} else {
increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
}
increment_->ClearFlag(HValue::kCanOverflow);
builder_->AddInstruction(increment_);
}
// Push the new increment value on the expression stack to merge into the phi.
builder_->environment()->Push(increment_);
HBasicBlock* last_block = builder_->current_block();
builder_->GotoNoSimulate(last_block, header_block_);
header_block_->loop_information()->RegisterBackEdge(last_block);
if (exit_trampoline_block_ != NULL) {
builder_->set_current_block(exit_trampoline_block_);
} else {
builder_->set_current_block(exit_block_);
}
finished_ = true;
}
HGraph* HGraphBuilder::CreateGraph() {
graph_ = new(zone()) HGraph(info_);
if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
CompilationPhase phase("H_Block building", info_);
set_current_block(graph()->entry_block());
if (!BuildGraph()) return NULL;
graph()->FinalizeUniqueness();
return graph_;
}
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
ASSERT(current_block() != NULL);
ASSERT(!FLAG_emit_opt_code_positions ||
position_ != RelocInfo::kNoPosition || !info_->IsOptimizing());
current_block()->AddInstruction(instr, position_);
if (graph()->IsInsideNoSideEffectsScope()) {
instr->SetFlag(HValue::kHasNoObservableSideEffects);
}
return instr;
}
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
position_ != RelocInfo::kNoPosition);
current_block()->Finish(last, position_);
if (last->IsReturn() || last->IsAbnormalExit()) {
set_current_block(NULL);
}
}
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
position_ != RelocInfo::kNoPosition);
current_block()->FinishExit(instruction, position_);
if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
set_current_block(NULL);
}
}
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
if (FLAG_native_code_counters && counter->Enabled()) {
HValue* reference = Add<HConstant>(ExternalReference(counter));
HValue* old_value = Add<HLoadNamedField>(reference,
HObjectAccess::ForCounter());
HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow
Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
new_value);
}
}
void HGraphBuilder::AddSimulate(BailoutId id,
RemovableSimulate removable) {
ASSERT(current_block() != NULL);
ASSERT(!graph()->IsInsideNoSideEffectsScope());
current_block()->AddNewSimulate(id, removable);
}
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
HBasicBlock* b = graph()->CreateBasicBlock();
b->SetInitialEnvironment(env);
return b;
}
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
HBasicBlock* header = graph()->CreateBasicBlock();
HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
header->SetInitialEnvironment(entry_env);
header->AttachLoopInformation();
return header;
}
HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
if (obj->type().IsHeapObject()) return obj;
return Add<HCheckHeapObject>(obj);
}
void HGraphBuilder::FinishExitWithHardDeoptimization(
const char* reason, HBasicBlock* continuation) {
PadEnvironmentForContinuation(current_block(), continuation);
Add<HDeoptimize>(reason, Deoptimizer::EAGER);
if (graph()->IsInsideNoSideEffectsScope()) {
GotoNoSimulate(continuation);
} else {
Goto(continuation);
}
}
void HGraphBuilder::PadEnvironmentForContinuation(
HBasicBlock* from,
HBasicBlock* continuation) {
if (continuation->last_environment() != NULL) {
// When merging from a deopt block to a continuation, resolve differences in
// environment by pushing constant 0 and popping extra values so that the
// environments match during the join. Push 0 since it has the most specific
// representation, and will not influence representation inference of the
// phi.
int continuation_env_length = continuation->last_environment()->length();
while (continuation_env_length != from->last_environment()->length()) {
if (continuation_env_length > from->last_environment()->length()) {
from->last_environment()->Push(graph()->GetConstant0());
} else {
from->last_environment()->Pop();
}
}
} else {
ASSERT(continuation->predecessors()->length() == 0);
}
}
HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
return Add<HCheckMaps>(obj, map, top_info());
}
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
if (!string->type().IsString()) {
ASSERT(!string->IsConstant() ||
!HConstant::cast(string)->HasStringValue());
BuildCheckHeapObject(string);
return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
}
return string;
}
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
if (object->type().IsJSObject()) return object;
return Add<HWrapReceiver>(object, function);
}
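// Emits the grow-on-out-of-bounds-store path: when |key| is at (fast case)
// or beyond (holey case) the current |length|, the backing store is grown to
// hold it, deoptimizing if the key would leave a gap larger than
// JSObject::kMaxGap; for JS arrays the length field is also bumped to
// key + 1. In-bounds keys just get a bounds check.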
HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
HValue* elements,
ElementsKind kind,
HValue* length,
HValue* key,
bool is_js_array) {
IfBuilder length_checker(this);
Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
length_checker.If<HCompareNumericAndBranch>(key, length, token);
length_checker.Then();
HValue* current_capacity = AddLoadFixedArrayLength(elements);
IfBuilder capacity_checker(this);
capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
Token::GTE);
capacity_checker.Then();
HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
IfBuilder key_checker(this);
key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
key_checker.Then();
key_checker.ElseDeopt("Key out of capacity range");
key_checker.End();
HValue* new_capacity = BuildNewElementsCapacity(key);
HValue* new_elements = BuildGrowElementsCapacity(object, elements,
kind, kind, length,
new_capacity);
environment()->Push(new_elements);
capacity_checker.Else();
environment()->Push(elements);
capacity_checker.End();
if (is_js_array) {
HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
new_length->ClearFlag(HValue::kCanOverflow);
Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
new_length);
}
length_checker.Else();
Add<HBoundsCheck>(key, length);
environment()->Push(elements);
length_checker.End();
return environment()->Pop();
}
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
HValue* elements,
ElementsKind kind,
HValue* length) {
Factory* factory = isolate()->factory();
IfBuilder cow_checker(this);
cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
cow_checker.Then();
HValue* capacity = AddLoadFixedArrayLength(elements);
HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
kind, length, capacity);
environment()->Push(new_elements);
cow_checker.Else();
environment()->Push(elements);
cow_checker.End();
return environment()->Pop();
}
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
HValue* map,
ElementsKind from_kind,
ElementsKind to_kind,
bool is_jsarray) {
ASSERT(!IsFastHoleyElementsKind(from_kind) ||
IsFastHoleyElementsKind(to_kind));
if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
Add<HTrapAllocationMemento>(object);
}
if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
HInstruction* elements = AddLoadElements(object);
HInstruction* empty_fixed_array = Add<HConstant>(
isolate()->factory()->empty_fixed_array());
IfBuilder if_builder(this);
if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
if_builder.Then();
HInstruction* elements_length = AddLoadFixedArrayLength(elements);
HInstruction* array_length = is_jsarray
? Add<HLoadNamedField>(object, HObjectAccess::ForArrayLength(from_kind))
: elements_length;
BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
array_length, elements_length);
if_builder.End();
}
Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
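// Emits a single probe of the SeededNumberDictionary lookup and recurses for
// the next probe, effectively unrolling up to kNumberDictionaryProbes probes.
// Each entry occupies kEntrySize consecutive slots starting at
// kElementsStartIndex, laid out as (key, value, details); only entries whose
// details are zero (the fast case) are loaded here, anything else deopts.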
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
HValue* elements,
HValue* key,
HValue* hash,
HValue* mask,
int current_probe) {
if (current_probe == kNumberDictionaryProbes) {
return NULL;
}
int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
HValue* raw_index = (current_probe == 0)
? hash
: AddUncasted<HAdd>(hash, Add<HConstant>(offset));
raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
int32_t entry_size = SeededNumberDictionary::kEntrySize;
raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
raw_index->ClearFlag(HValue::kCanOverflow);
int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
key_index->ClearFlag(HValue::kCanOverflow);
HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
static_cast<HValue*>(NULL),
FAST_SMI_ELEMENTS);
IfBuilder key_compare(this);
key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
key_compare.Then();
{
// Key at the current probe doesn't match; try the next probe.
HValue* result = BuildUncheckedDictionaryElementLoadHelper(
elements, key, hash, mask, current_probe + 1);
if (result == NULL) {
key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
result = graph()->GetConstantUndefined();
} else {
Push(result);
}
}
key_compare.Else();
{
// Key at the current probe matches. Details must be zero; otherwise the
// dictionary element requires special handling.
HValue* details_index = AddUncasted<HAdd>(
raw_index, Add<HConstant>(base_offset + 2));
details_index->ClearFlag(HValue::kCanOverflow);
HValue* details = Add<HLoadKeyed>(elements, details_index,
static_cast<HValue*>(NULL),
FAST_SMI_ELEMENTS);
IfBuilder details_compare(this);
details_compare.If<HCompareNumericAndBranch>(details,
graph()->GetConstant0(),
Token::NE);
details_compare.ThenDeopt("keyed load dictionary element not fast case");
details_compare.Else();
{
// Key matches and details are zero --> fast case. Load and return the
// value.
HValue* result_index = AddUncasted<HAdd>(
raw_index, Add<HConstant>(base_offset + 1));
result_index->ClearFlag(HValue::kCanOverflow);
Push(Add<HLoadKeyed>(elements, result_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS));
}
details_compare.End();
}
key_compare.End();
return Pop();
}
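// Computes the hash of an element index using the same bit-mixing steps as
// the runtime's seeded integer hash (presumably ComputeIntegerHash); the two
// must agree, or lookups in dictionaries built by the runtime would miss.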
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
HValue* seed = Add<HConstant>(seed_value);
HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
// hash = ~hash + (hash << 15);
HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
graph()->GetConstantMinus1());
hash = AddUncasted<HAdd>(shifted_hash, not_hash);
// hash = hash ^ (hash >> 12);
shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
// hash = hash + (hash << 2);
shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
hash = AddUncasted<HAdd>(hash, shifted_hash);
// hash = hash ^ (hash >> 4);
shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
// hash = hash * 2057;
hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
hash->ClearFlag(HValue::kCanOverflow);
// hash = hash ^ (hash >> 16);
shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
HValue* key) {
HValue* elements = AddLoadElements(receiver);
HValue* hash = BuildElementIndexHash(key);
HValue* capacity = Add<HLoadKeyed>(
elements,
Add<HConstant>(NameDictionary::kCapacityIndex),
static_cast<HValue*>(NULL),
FAST_SMI_ELEMENTS);
HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
mask->ChangeRepresentation(Representation::Integer32());
mask->ClearFlag(HValue::kCanOverflow);
return BuildUncheckedDictionaryElementLoadHelper(elements, key,
hash, mask, 0);
}
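// Converts a number to a string, first consulting the number string cache: a
// fixed array of (number, string) pairs where the entry for |hash| lives at
// index hash * 2 (the number) and hash * 2 + 1 (the string). On a cache miss
// the work is delegated to %NumberToStringSkipCache.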
HValue* HGraphBuilder::BuildNumberToString(HValue* object,
Handle<Type> type) {
NoObservableSideEffectsScope scope(this);
// Convert constant numbers at compile time.
if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
Handle<Object> number = HConstant::cast(object)->handle(isolate());
Handle<String> result = isolate()->factory()->NumberToString(number);
return Add<HConstant>(result);
}
// Create a joinable continuation.
HIfContinuation found(graph()->CreateBasicBlock(),
graph()->CreateBasicBlock());
// Load the number string cache.
HValue* number_string_cache =
Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
// Make the hash mask from the length of the number string cache. It
// contains two elements (number and string) for each cache entry.
HValue* mask = AddLoadFixedArrayLength(number_string_cache);
mask->set_type(HType::Smi());
mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
// Check whether object is a smi.
IfBuilder if_objectissmi(this);
if_objectissmi.If<HIsSmiAndBranch>(object);
if_objectissmi.Then();
{
// Compute hash for smi similar to smi_get_hash().
HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
// Load the key.
HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS, ALLOW_RETURN_HOLE);
// Check if object == key.
IfBuilder if_objectiskey(this);
if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
if_objectiskey.Then();
{
// Make the key_index available.
Push(key_index);
}
if_objectiskey.JoinContinuation(&found);
}
if_objectissmi.Else();
{
if (type->Is(Type::Smi())) {
if_objectissmi.Deopt("Expected smi");
} else {
// Check if the object is a heap number.
IfBuilder if_objectisnumber(this);
if_objectisnumber.If<HCompareMap>(
object, isolate()->factory()->heap_number_map());
if_objectisnumber.Then();
{
// Compute hash for heap number similar to double_get_hash().
HValue* low = Add<HLoadNamedField>(
object, HObjectAccess::ForHeapNumberValueLowestBits());
HValue* high = Add<HLoadNamedField>(
object, HObjectAccess::ForHeapNumberValueHighestBits());
HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
// Load the key.
HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS, ALLOW_RETURN_HOLE);
// Check if key is a heap number (the number string cache contains only
// SMIs and heap numbers, so it is sufficient to do a SMI check here).
IfBuilder if_keyisnotsmi(this);
if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
if_keyisnotsmi.Then();
{
// Check if values of key and object match.
IfBuilder if_keyeqobject(this);
if_keyeqobject.If<HCompareNumericAndBranch>(
Add<HLoadNamedField>(key, HObjectAccess::ForHeapNumberValue()),
Add<HLoadNamedField>(object, HObjectAccess::ForHeapNumberValue()),
Token::EQ);
if_keyeqobject.Then();
{
// Make the key_index available.
Push(key_index);
}
if_keyeqobject.JoinContinuation(&found);
}
if_keyisnotsmi.JoinContinuation(&found);
}
if_objectisnumber.Else();
{
if (type->Is(Type::Number())) {
if_objectisnumber.Deopt("Expected heap number");
}
}
if_objectisnumber.JoinContinuation(&found);
}
}
if_objectissmi.JoinContinuation(&found);
// Check for cache hit.
IfBuilder if_found(this, &found);
if_found.Then();
{
// Count number to string operation in native code.
AddIncrementCounter(isolate()->counters()->number_to_string_native());
// Load the value in case of cache hit.
HValue* key_index = Pop();
HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
Push(Add<HLoadKeyed>(number_string_cache, value_index,
static_cast<HValue*>(NULL),
FAST_ELEMENTS, ALLOW_RETURN_HOLE));
}
if_found.Else();
{
// Cache miss, fallback to runtime.
Add<HPushArgument>(object);
Push(Add<HCallRuntime>(
isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
1));
}
if_found.End();
return Pop();
}
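// Computes the allocation size in bytes for a sequential string of |length|
// characters: the character data (doubled for two-byte encodings) plus
// SeqString::kHeaderSize, rounded up to the object alignment. For example, a
// two-byte string of length 3 needs kHeaderSize + 6 bytes before rounding.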
HValue* HGraphBuilder::BuildSeqStringSizeFor(HValue* length,
String::Encoding encoding) {
STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
HValue* size = length;
if (encoding == String::TWO_BYTE_ENCODING) {
size = AddUncasted<HShl>(length, graph()->GetConstant1());
size->ClearFlag(HValue::kCanOverflow);
size->SetFlag(HValue::kUint32);
}
size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
SeqString::kHeaderSize + kObjectAlignmentMask)));
size->ClearFlag(HValue::kCanOverflow);
size = AddUncasted<HBitwise>(
Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
~kObjectAlignmentMask)));
return size;
}
void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
HValue* src_offset,
String::Encoding src_encoding,
HValue* dst,
HValue* dst_offset,
String::Encoding dst_encoding,
HValue* length) {
ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
src_encoding == String::ONE_BYTE_ENCODING);
LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
{
HValue* src_index = AddUncasted<HAdd>(src_offset, index);
HValue* value =
AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
}
loop.EndBody();
}
HValue* HGraphBuilder::BuildUncheckedStringAdd(HValue* left,
HValue* right,
PretenureFlag pretenure_flag) {
// Determine the string lengths.
HValue* left_length = Add<HLoadNamedField>(
left, HObjectAccess::ForStringLength());
HValue* right_length = Add<HLoadNamedField>(
right, HObjectAccess::ForStringLength());
// Compute the combined string length. If the result is larger than the max
// supported string length, we bail out to the runtime. This happens implicitly
// when converting the result back to a smi in case the max string length
// equals the max smi value. Otherwise, for platforms with 32-bit smis, we do
// an explicit check against String::kMaxLength below.
HValue* length = AddUncasted<HAdd>(left_length, right_length);
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
if (String::kMaxLength != Smi::kMaxValue) {
IfBuilder if_nooverflow(this);
if_nooverflow.If<HCompareNumericAndBranch>(
length, Add<HConstant>(String::kMaxLength), Token::LTE);
if_nooverflow.Then();
if_nooverflow.ElseDeopt("String length exceeds limit");
}
// Determine the string instance types.
HLoadNamedField* left_instance_type = Add<HLoadNamedField>(
Add<HLoadNamedField>(left, HObjectAccess::ForMap()),
HObjectAccess::ForMapInstanceType());
HLoadNamedField* right_instance_type = Add<HLoadNamedField>(
Add<HLoadNamedField>(right, HObjectAccess::ForMap()),
HObjectAccess::ForMapInstanceType());
// Compute difference of instance types.
HValue* xored_instance_types = AddUncasted<HBitwise>(
Token::BIT_XOR, left_instance_type, right_instance_type);
// Check if we should create a cons string.
IfBuilder if_createcons(this);
if_createcons.If<HCompareNumericAndBranch>(
length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
if_createcons.Then();
{
// Allocate the cons string object. HAllocate does not care whether we
// pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
// CONS_STRING_TYPE. Below we decide whether the cons string is
// one-byte or two-byte and set the appropriate map.
HAllocate* string = Add<HAllocate>(Add<HConstant>(ConsString::kSize),
HType::String(), pretenure_flag,
CONS_STRING_TYPE);
// Compute the intersection of instance types.
HValue* anded_instance_types = AddUncasted<HBitwise>(
Token::BIT_AND, left_instance_type, right_instance_type);
// We create a one-byte cons string if
// 1. both strings are one-byte, or
// 2. at least one of the strings is two-byte, but happens to contain only
// one-byte characters.
// To do this, we check
// 1. if both strings are one-byte, or if the one-byte data hint is set in
// both strings, or
// 2. if one of the strings has the one-byte data hint set and the other
// string is one-byte.
IfBuilder if_onebyte(this);
STATIC_ASSERT(kOneByteStringTag != 0);
STATIC_ASSERT(kOneByteDataHintMask != 0);
if_onebyte.If<HCompareNumericAndBranch>(
AddUncasted<HBitwise>(
Token::BIT_AND, anded_instance_types,
Add<HConstant>(static_cast<int32_t>(
kStringEncodingMask | kOneByteDataHintMask))),
graph()->GetConstant0(), Token::NE);
if_onebyte.Or();
STATIC_ASSERT(kOneByteStringTag != 0 &&
kOneByteDataHintTag != 0 &&
kOneByteDataHintTag != kOneByteStringTag);
if_onebyte.If<HCompareNumericAndBranch>(
AddUncasted<HBitwise>(
Token::BIT_AND, xored_instance_types,
Add<HConstant>(static_cast<int32_t>(
kOneByteStringTag | kOneByteDataHintTag))),
Add<HConstant>(static_cast<int32_t>(
kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
if_onebyte.Then();
{
// We can safely skip the write barrier for storing the map here.
Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
AddStoreMapConstantNoWriteBarrier(string, map);
}
if_onebyte.Else();
{
// We can safely skip the write barrier for storing the map here.
Handle<Map> map = isolate()->factory()->cons_string_map();
AddStoreMapConstantNoWriteBarrier(string, map);
}
if_onebyte.End();
// Initialize the cons string fields.
Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(),
Add<HConstant>(String::kEmptyHashField));
Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(), length);
Add<HStoreNamedField>(string, HObjectAccess::ForConsStringFirst(), left);
Add<HStoreNamedField>(string, HObjectAccess::ForConsStringSecond(),
right);
// Count the native string addition.
AddIncrementCounter(isolate()->counters()->string_add_native());
// Cons string is result.
Push(string);
}
if_createcons.Else();
{
// Compute union of instance types.
HValue* ored_instance_types = AddUncasted<HBitwise>(
Token::BIT_OR, left_instance_type, right_instance_type);
// Check if both strings have the same encoding and both are
// sequential.
IfBuilder if_sameencodingandsequential(this);
if_sameencodingandsequential.If<HCompareNumericAndBranch>(
AddUncasted<HBitwise>(
Token::BIT_AND, xored_instance_types,
Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
graph()->GetConstant0(), Token::EQ);
if_sameencodingandsequential.And();
STATIC_ASSERT(kSeqStringTag == 0);
if_sameencodingandsequential.If<HCompareNumericAndBranch>(
AddUncasted<HBitwise>(
Token::BIT_AND, ored_instance_types,
Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
graph()->GetConstant0(), Token::EQ);
if_sameencodingandsequential.Then();
{
// Check if the result is a one-byte string.
IfBuilder if_onebyte(this);
STATIC_ASSERT(kOneByteStringTag != 0);
if_onebyte.If<HCompareNumericAndBranch>(
AddUncasted<HBitwise>(
Token::BIT_AND, ored_instance_types,
Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
graph()->GetConstant0(), Token::NE);
if_onebyte.Then();
{
// Calculate the number of bytes needed for the characters in the
// string while observing object alignment.
HValue* size = BuildSeqStringSizeFor(
length, String::ONE_BYTE_ENCODING);
// Allocate the ASCII string object.
Handle<Map> map = isolate()->factory()->ascii_string_map();
HAllocate* string = Add<HAllocate>(size, HType::String(),
pretenure_flag, ASCII_STRING_TYPE);
string->set_known_initial_map(map);
// We can safely skip the write barrier for storing map here.
AddStoreMapConstantNoWriteBarrier(string, map);
// Length must be stored into the string before we copy characters to
// make debug verification code happy.
Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(),
length);
// Copy bytes from the left string.
BuildCopySeqStringChars(
left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
string, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
left_length);
// Copy bytes from the right string.
BuildCopySeqStringChars(
right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
string, left_length, String::ONE_BYTE_ENCODING,
right_length);
// Count the native string addition.
AddIncrementCounter(isolate()->counters()->string_add_native());
// Return the string.
Push(string);
}
if_onebyte.Else();
{
// Calculate the number of bytes needed for the characters in the
// string while observing object alignment.
HValue* size = BuildSeqStringSizeFor(
length, String::TWO_BYTE_ENCODING);
// Allocate the two-byte string object.
Handle<Map> map = isolate()->factory()->string_map();
HAllocate* string = Add<HAllocate>(size, HType::String(),
pretenure_flag, STRING_TYPE);
string->set_known_initial_map(map);
        // We can safely skip the write barrier for storing the map here.
AddStoreMapConstantNoWriteBarrier(string, map);
// Length must be stored into the string before we copy characters to
// make debug verification code happy.
Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(),
length);
// Copy bytes from the left string.
BuildCopySeqStringChars(
left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
string, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
left_length);
// Copy bytes from the right string.
BuildCopySeqStringChars(
right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
string, left_length, String::TWO_BYTE_ENCODING,
right_length);
// Return the string.
Push(string);
}
if_onebyte.End();
// Initialize the (common) string fields.
HValue* string = Pop();
Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(),
Add<HConstant>(String::kEmptyHashField));
// Count the native string addition.
AddIncrementCounter(isolate()->counters()->string_add_native());
Push(string);
}
if_sameencodingandsequential.Else();
{
// Fallback to the runtime to add the two strings.
Add<HPushArgument>(left);
Add<HPushArgument>(right);
Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kStringAdd),
2));
}
if_sameencodingandsequential.End();
}
if_createcons.End();
return Pop();
}
HValue* HGraphBuilder::BuildStringAdd(HValue* left,
HValue* right,
PretenureFlag pretenure_flag) {
// Determine the string lengths.
HValue* left_length = Add<HLoadNamedField>(
left, HObjectAccess::ForStringLength());
HValue* right_length = Add<HLoadNamedField>(
right, HObjectAccess::ForStringLength());
// Check if left string is empty.
IfBuilder if_leftisempty(this);
if_leftisempty.If<HCompareNumericAndBranch>(
left_length, graph()->GetConstant0(), Token::EQ);
if_leftisempty.Then();
{
// Count the native string addition.
AddIncrementCounter(isolate()->counters()->string_add_native());
// Just return the right string.
Push(right);
}
if_leftisempty.Else();
{
// Check if right string is empty.
IfBuilder if_rightisempty(this);
if_rightisempty.If<HCompareNumericAndBranch>(
right_length, graph()->GetConstant0(), Token::EQ);
if_rightisempty.Then();
{
// Count the native string addition.
AddIncrementCounter(isolate()->counters()->string_add_native());
// Just return the left string.
Push(left);
}
if_rightisempty.Else();
{
// Concatenate the two non-empty strings.
Push(BuildUncheckedStringAdd(left, right, pretenure_flag));
}
if_rightisempty.End();
}
if_leftisempty.End();
return Pop();
}
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
HValue* checked_object,
HValue* key,
HValue* val,
bool is_js_array,
ElementsKind elements_kind,
bool is_store,
LoadKeyedHoleMode load_mode,
KeyedAccessStoreMode store_mode) {
ASSERT(!IsExternalArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on an HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for
  // FAST_ELEMENTS stores, since a transition to HOLEY elements won't change
  // the generated store code.
if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
(elements_kind == FAST_ELEMENTS && is_store)) {
checked_object->ClearGVNFlag(kDependsOnElementsKind);
}
bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
bool fast_elements = IsFastObjectElementsKind(elements_kind);
HValue* elements = AddLoadElements(checked_object);
if (is_store && (fast_elements || fast_smi_only_elements) &&
store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
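    // Checking against fixed_array_map rules out copy-on-write backing
    // stores, which use fixed_cow_array_map instead.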
HCheckMaps* check_cow_map = Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(), top_info());
check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
}
HInstruction* length = NULL;
if (is_js_array) {
length = Add<HLoadNamedField>(
checked_object, HObjectAccess::ForArrayLength(elements_kind));
} else {
length = AddLoadFixedArrayLength(elements);
}
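  // Array lengths are always stored as smis, so the loaded length can be
  // typed as a smi without a check.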
length->set_type(HType::Smi());
HValue* checked_key = NULL;
if (IsExternalArrayElementsKind(elements_kind)) {
if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
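      // Out-of-bounds stores to external arrays are silently ignored: the
      // element access below only happens when 0 <= key < length, while a
      // negative key deopts.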
NoObservableSideEffectsScope no_effects(this);
HLoadExternalArrayPointer* external_elements =
Add<HLoadExternalArrayPointer>(elements);
IfBuilder length_checker(this);
length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
length_checker.Then();
IfBuilder negative_checker(this);
HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
key, graph()->GetConstant0(), Token::GTE);
negative_checker.Then();
HInstruction* result = AddElementAccess(
external_elements, key, val, bounds_check, elements_kind, is_store);
negative_checker.ElseDeopt("Negative key encountered");
negative_checker.End();
length_checker.End();
return result;
} else {
ASSERT(store_mode == STANDARD_STORE);
checked_key = Add<HBoundsCheck>(key, length);
HLoadExternalArrayPointer* external_elements =
Add<HLoadExternalArrayPointer>(elements);
return AddElementAccess(
external_elements, checked_key, val,
checked_object, elements_kind, is_store);
}
}
ASSERT(fast_smi_only_elements ||
fast_elements ||
IsFastDoubleElementsKind(elements_kind));
  // In case val is stored into a fast smi array, ensure that the value is a
  // smi before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
if (is_store && IsFastSmiElementsKind(elements_kind) &&
!val->type().IsSmi()) {
val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
}
if (IsGrowStoreMode(store_mode)) {
NoObservableSideEffectsScope no_effects(this);
elements = BuildCheckForCapacityGrow(checked_object, elements,
elements_kind, length, key,
is_js_array);
checked_key = key;
} else {
checked_key = Add<HBoundsCheck>(key, length);
if (is_store && (fast_elements || fast_smi_only_elements)) {
if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
NoObservableSideEffectsScope no_effects(this);
elements = BuildCopyElementsOnWrite(checked_object, elements,
elements_kind, length);
} else {
HCheckMaps* check_cow_map = Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(), top_info());
check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
}
}
}
return AddElementAccess(elements, checked_key, val, checked_object,
elements_kind, is_store, load_mode);
}
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
JSArrayBuilder* array_builder,
HValue* length_argument) {
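  // A constant smi length lets us choose statically between the empty-array
  // and the fixed-size allocation paths.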
if (length_argument->IsConstant() &&
HConstant::cast(length_argument)->HasSmiValue()) {
int array_length = HConstant::cast(length_argument)->Integer32Value();
HValue* new_object = array_length == 0
? array_builder->AllocateEmptyArray()
: array_builder->AllocateArray(length_argument, length_argument);
return new_object;
}
HValue* constant_zero = graph()->GetConstant0();
HConstant* max_alloc_length =
Add<HConstant>(JSObject::kInitialMaxFastElementArray);
HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
max_alloc_length);
IfBuilder if_builder(this);
if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
Token::EQ);
if_builder.Then();
const int initial_capacity = JSArray::kPreallocatedArrayElements;
HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
Push(initial_capacity_node); // capacity
Push(constant_zero); // length
if_builder.Else();
if (!(top_info()->IsStub()) &&
IsFastPackedElementsKind(array_builder->kind())) {
// We'll come back later with better (holey) feedback.
if_builder.Deopt("Holey array despite packed elements_kind feedback");
} else {
Push(checked_length); // capacity
Push(checked_length); // length
}
if_builder.End();
  // Pop the length and capacity chosen above and allocate the array.
HValue* length = Pop();
HValue* capacity = Pop();
return array_builder->AllocateArray(capacity, length);
}
HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
HValue* capacity) {
int elements_size;
InstanceType instance_type;
if (IsFastDoubleElementsKind(kind)) {
elements_size = kDoubleSize;
instance_type = FIXED_DOUBLE_ARRAY_TYPE;
} else {
elements_size = kPointerSize;
instance_type = FIXED_ARRAY_TYPE;
}
HConstant* elements_size_value = Add<HConstant>(elements_size);
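  // Callers are expected to bound the capacity (e.g. via
  // BuildNewSpaceArrayCheck), so the size computation below cannot overflow.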
HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
mul->ClearFlag(HValue::kCanOverflow);
HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
HValue* total_size = AddUncasted<HAdd>(mul, header_size);
total_size->ClearFlag(HValue::kCanOverflow);
return Add<HAllocate>(total_size, HType::JSArray(),
isolate()->heap()->GetPretenureMode(), instance_type);
}
void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
ElementsKind kind,
HValue* capacity) {
Factory* factory = isolate()->factory();
Handle<Map> map = IsFastDoubleElementsKind(kind)
? factory->fixed_double_array_map()
: factory->fixed_array_map();
AddStoreMapConstant(elements, map);
Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
capacity);
}
HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
ElementsKind kind,
HValue* capacity) {
  // The HForceRepresentation is to prevent a possible deopt on the int-to-smi
  // conversion after allocation but before the new object's fields are set.
capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
HValue* new_elements = BuildAllocateElements(kind, capacity);
BuildInitializeElementsHeader(new_elements, kind, capacity);
return new_elements;
}
HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
HValue* array_map,
AllocationSiteMode mode,
ElementsKind elements_kind,
HValue* allocation_site_payload,
HValue* length_field) {
Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
HConstant* empty_fixed_array =
Add<HConstant>(isolate()->factory()->empty_fixed_array());
HObjectAccess access = HObjectAccess::ForPropertiesPointer();
Add<HStoreNamedField>(array, access, empty_fixed_array);
Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
length_field);
if (mode == TRACK_ALLOCATION_SITE) {
BuildCreateAllocationMemento(array,
JSArray::kSize,
allocation_site_payload);
if (FLAG_allocation_site_pretenuring) {
// TODO(mvstanton): move this code into BuildCreateAllocationMemento when
// constructed arrays also pay attention to pretenuring.
HObjectAccess access =
HObjectAccess::ForAllocationSiteOffset(
AllocationSite::kMementoCreateCountOffset);
HValue* create_info = Add<HLoadNamedField>(allocation_site_payload,
access);
HInstruction* new_create_info =
AddUncasted<HAdd>(create_info, graph()->GetConstant1());
new_create_info->ClearFlag(HValue::kCanOverflow);
HStoreNamedField* store = Add<HStoreNamedField>(allocation_site_payload,
access, new_create_info);
// No write barrier needed to store a smi.
store->SkipWriteBarrier();
}
}
int elements_location = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
elements_location += AllocationMemento::kSize;
}
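  // The elements array is allocated inline, directly after the JSArray (and
  // the allocation memento, if any).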
HValue* elements = Add<HInnerAllocatedObject>(array, elements_location);
Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
return static_cast<HInnerAllocatedObject*>(elements);
}
HInstruction* HGraphBuilder::AddElementAccess(
HValue* elements,
HValue* checked_key,
HValue* val,
HValue* dependency,
ElementsKind elements_kind,
bool is_store,
LoadKeyedHoleMode load_mode) {
if (is_store) {
ASSERT(val != NULL);
if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
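      // Stores to pixel arrays clamp the stored value to the uint8 range.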
val = Add<HClampToUint8>(val);
}
return Add<HStoreKeyed>(elements, checked_key, val, elements_kind);
}
ASSERT(!is_store);
ASSERT(val == NULL);
HLoadKeyed* load = Add<HLoadKeyed>(
elements, checked_key, dependency, elements_kind, load_mode);
if (FLAG_opt_safe_uint32_operations &&
elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
graph()->RecordUint32Instruction(load);
}
return load;
}
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
return Add<HLoadNamedField>(object, HObjectAccess::ForElementsPointer());
}
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
return Add<HLoadNamedField>(object,
HObjectAccess::ForFixedArrayLength());
}
HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
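  // Grow by 50% plus a fixed minimum, i.e.
  // new_capacity = old_capacity + (old_capacity >> 1) + 16.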
HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
graph_->GetConstant1());
HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
new_capacity->ClearFlag(HValue::kCanOverflow);
HValue* min_growth = Add<HConstant>(16);
new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
new_capacity->ClearFlag(HValue::kCanOverflow);
return new_capacity;
}
void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
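  // Bail out (via the bounds check below) if an array of this length cannot
  // fit into a regular new-space allocation, including the JSArray header.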
Heap* heap = isolate()->heap();
int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
: kPointerSize;
int max_size = heap->MaxRegularSpaceAllocationSize() / element_size;
max_size -= JSArray::kSize / element_size;
HConstant* max_size_constant = Add<HConstant>(max_size);
Add<HBoundsCheck>(length, max_size_constant);
}
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
HValue* elements,
ElementsKind kind,
ElementsKind new_kind,
HValue* length,
HValue* new_capacity) {
BuildNewSpaceArrayCheck(new_capacity, new_kind);
HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
new_kind, new_capacity);
BuildCopyElements(elements, kind,
new_elements, new_kind,
length, new_capacity);
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
new_elements);
return new_elements;
}
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
ElementsKind elements_kind,
HValue* from,
HValue* to) {
  // Fast elements kinds need to be initialized in case the statements below
  // trigger a garbage collection.
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
? Add<HConstant>(factory->the_hole_value())
: Add<HConstant>(nan_double);
  // Special case: unfold the initialization loop for small constant ranges.
static const int kLoopUnfoldLimit = 8;
STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
int initial_capacity = -1;
if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
int constant_from = from->GetInteger32Constant();
int constant_to = to->GetInteger32Constant();
if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
initial_capacity = constant_to;
}
}
// Since we're about to store a hole value, the store instruction below must
// assume an elements kind that supports heap object values.
if (IsFastSmiOrObjectElementsKind(elements_kind)) {
elements_kind = FAST_HOLEY_ELEMENTS;
}
if (initial_capacity >= 0) {
for (int i = 0; i < initial_capacity; i++) {
HInstruction* key = Add<HConstant>(i);
Add<HStoreKeyed>(elements, key, hole, elements_kind);
}
} else {
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
HValue* key = builder.BeginBody(from, to, Token::LT);
Add<HStoreKeyed>(elements, key, hole, elements_kind);
builder.EndBody();
}
}
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity) {
bool pre_fill_with_holes =
IsFastDoubleElementsKind(from_elements_kind) &&
IsFastObjectElementsKind(to_elements_kind);
if (pre_fill_with_holes) {
    // If the copy might trigger a GC, pre-initialize the FixedArray with
    // holes so that it is always in a consistent state.
BuildFillElementsWithHole(to_elements, to_elements_kind,
graph()->GetConstant0(), capacity);
}
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
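  // Load with ALLOW_RETURN_HOLE so that holes survive the load and can be
  // rewritten into the destination kind's hole representation below.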
HValue* element = Add<HLoadKeyed>(from_elements, key,
static_cast<HValue*>(NULL),
from_elements_kind,
ALLOW_RETURN_HOLE);
ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
IsFastSmiElementsKind(to_elements_kind))
? FAST_HOLEY_ELEMENTS : to_elements_kind;
if (IsHoleyElementsKind(from_elements_kind) &&
from_elements_kind != to_elements_kind) {
IfBuilder if_hole(this);
if_hole.If<HCompareHoleAndBranch>(element);
if_hole.Then();
HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
: graph()->GetConstantHole();
Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
if_hole.Else();
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
if_hole.End();
} else {
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
}
builder.EndBody();
if (!pre_fill_with_holes && length != capacity) {
// Fill unused capacity with the hole.
BuildFillElementsWithHole(to_elements, to_elements_kind,
key, capacity);
}
}
HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length) {
NoObservableSideEffectsScope no_effects(this);
// All sizes here are multiples of kPointerSize.
int size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
size += AllocationMemento::kSize;
}
HValue* size_in_bytes = Add<HConstant>(size);
HInstruction* object = Add<HAllocate>(size_in_bytes,
HType::JSObject(),
NOT_TENURED,
JS_OBJECT_TYPE);
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
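    // The elements pointer is skipped here and set up separately below,
    // unless the boilerplate has no elements at all.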
if ((i != JSArray::kElementsOffset) || (length == 0)) {
HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
Add<HStoreNamedField>(object, access,
Add<HLoadNamedField>(boilerplate, access));
}
}
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
BuildCreateAllocationMemento(object, JSArray::kSize, allocation_site);
}
if (length > 0) {
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* object_elements;
if (IsFastDoubleElementsKind(kind)) {
HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
} else {
HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
NOT_TENURED, FIXED_ARRAY_TYPE);
}
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
object_elements);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
Add<HStoreNamedField>(object_elements, access,
Add<HLoadNamedField>(boilerplate_elements, access));
}
// Copy the elements array contents.
// TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
// copying loops with constant length up to a given boundary and use this
// helper here instead.
for (int i = 0; i < length; i++) {
HValue* key_constant = Add<HConstant>(i);
HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
static_cast<HValue*>(NULL), kind);
Add<HStoreKeyed>(object_elements, key_constant, value, kind);
}
}
return object;
}
void HGraphBuilder::BuildCompareNil(
HValue* value,
Handle<Type> type,
HIfContinuation* continuation) {
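  // Build a disjunction of the nil-like checks that the recorded type deems
  // possible; cases the type feedback has ruled out are handled on the else
  // path below.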
IfBuilder if_nil(this);
bool some_case_handled = false;
bool some_case_missing = false;
if (type->Maybe(Type::Null())) {
if (some_case_handled) if_nil.Or();
if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
some_case_handled = true;
} else {
some_case_missing = true;
}
if (type->Maybe(Type::Undefined())) {
if (some_case_handled) if_nil.Or();
if_nil.If<HCompareObjectEqAndBranch>(value,
graph()->GetConstantUndefined());
some_case_handled = true;
} else {
some_case_missing = true;
}
if (type->Maybe(Type::Undetectable())) {
if (some_case_handled) if_nil.Or();
if_nil.If<HIsUndetectableAndBranch>(value);
some_case_handled = true;
} else {
some_case_missing = true;
}
if (some_case_missing) {
if_nil.Then();
if_nil.Else();
if (type->NumClasses() == 1) {
BuildCheckHeapObject(value);
// For ICs, the map checked below is a sentinel map that gets replaced by
// the monomorphic map when the code is used as a template to generate a
// new IC. For optimized functions, there is no sentinel map, the map
// emitted below is the actual monomorphic map.
BuildCheckMap(value, type->Classes().Current());
} else {
if_nil.Deopt("Too many undetectable types");
}
}
if_nil.CaptureContinuation(continuation);
}
HValue* HGraphBuilder::BuildCreateAllocationMemento(HValue* previous_object,
int previous_object_size,
HValue* alloc_site) {
ASSERT(alloc_site != NULL);
HInnerAllocatedObject* alloc_memento = Add<HInnerAllocatedObject>(
previous_object, previous_object_size);
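  // The memento lives in the same allocation, immediately after the object.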
Handle<Map> alloc_memento_map =
isolate()->factory()->allocation_memento_map();
AddStoreMapConstant(alloc_memento, alloc_memento_map);
HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
Add<HStoreNamedField>(alloc_memento, access, alloc_site);
return alloc_memento;
}
HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global object, then the native context.
HInstruction* global_object = Add<HGlobalObject>();
HObjectAccess access = HObjectAccess::ForJSObjectOffset(
GlobalObject::kNativeContextOffset);
return Add<HLoadNamedField>(global_object, access);