67 #if V8_TARGET_ARCH_IA32
69 #elif V8_TARGET_ARCH_X64
71 #elif V8_TARGET_ARCH_ARM64
73 #elif V8_TARGET_ARCH_ARM
75 #elif V8_TARGET_ARCH_MIPS
78 #error Unsupported target architecture.
84 HBasicBlock::HBasicBlock(HGraph* graph)
85 : block_id_(graph->GetNextBlockID()),
87 phis_(4, graph->zone()),
91 loop_information_(
NULL),
92 predecessors_(2, graph->zone()),
94 dominated_blocks_(4, graph->zone()),
95 last_environment_(
NULL),
97 first_instruction_index_(-1),
98 last_instruction_index_(-1),
99 deleted_phis_(4, graph->zone()),
100 parent_loop_header_(
NULL),
101 inlined_entry_block_(
NULL),
102 is_inline_return_target_(
false),
104 dominates_loop_successors_(
false),
105 is_osr_entry_(
false) { }
108 Isolate* HBasicBlock::isolate()
const {
109 return graph_->isolate();
113 void HBasicBlock::MarkUnreachable() {
114 is_reachable_ =
false;
118 void HBasicBlock::AttachLoopInformation() {
120 loop_information_ =
new(zone()) HLoopInformation(
this, zone());
124 void HBasicBlock::DetachLoopInformation() {
126 loop_information_ =
NULL;
130 void HBasicBlock::AddPhi(HPhi* phi) {
132 phis_.Add(phi, zone());
137 void HBasicBlock::RemovePhi(HPhi* phi) {
138 ASSERT(phi->block() ==
this);
139 ASSERT(phis_.Contains(phi));
141 phis_.RemoveElement(phi);
146 void HBasicBlock::AddInstruction(HInstruction* instr,
147 HSourcePosition position) {
148 ASSERT(!IsStartBlock() || !IsFinished());
149 ASSERT(!instr->IsLinked());
152 if (!position.IsUnknown()) {
153 instr->set_position(position);
155 if (first_ ==
NULL) {
157 ASSERT(!last_environment()->ast_id().IsNone());
158 HBlockEntry* entry =
new(zone()) HBlockEntry();
159 entry->InitializeAsFirst(
this);
160 if (!position.IsUnknown()) {
161 entry->set_position(position);
163 ASSERT(!FLAG_hydrogen_track_positions ||
164 !graph()->
info()->IsOptimizing());
166 first_ = last_ = entry;
168 instr->InsertAfter(last_);
172 HPhi* HBasicBlock::AddNewPhi(
int merged_index) {
173 if (graph()->IsInsideNoSideEffectsScope()) {
174 merged_index = HPhi::kInvalidMergedIndex;
176 HPhi* phi =
new(zone()) HPhi(merged_index, zone());
182 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
185 HEnvironment* environment = last_environment();
188 environment->closure()->shared()->VerifyBailoutId(ast_id));
190 int push_count = environment->push_count();
191 int pop_count = environment->pop_count();
194 new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
196 instr->set_closure(environment->closure());
201 for (
int i = 0; i < push_count; ++i) {
202 instr->AddPushedValue(environment->ExpressionStackAt(i));
204 for (GrowableBitVector::Iterator it(environment->assigned_variables(),
208 int index = it.Current();
209 instr->AddAssignedValue(index, environment->Lookup(index));
211 environment->ClearHistory();
216 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
218 AddInstruction(end, position);
220 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
221 it.Current()->RegisterPredecessor(
this);
226 void HBasicBlock::Goto(HBasicBlock* block,
227 HSourcePosition position,
228 FunctionState* state,
230 bool drop_extra = state !=
NULL &&
233 if (block->IsInlineReturnTarget()) {
234 HEnvironment* env = last_environment();
235 int argument_count = env->arguments_environment()->parameter_count();
236 AddInstruction(
new(zone())
237 HLeaveInlined(state->entry(), argument_count),
239 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
243 HGoto* instr =
new(zone()) HGoto(block);
244 Finish(instr, position);
248 void HBasicBlock::AddLeaveInlined(HValue* return_value,
249 FunctionState* state,
250 HSourcePosition position) {
251 HBasicBlock* target = state->function_return();
254 ASSERT(target->IsInlineReturnTarget());
256 HEnvironment* env = last_environment();
257 int argument_count = env->arguments_environment()->parameter_count();
258 AddInstruction(
new(zone()) HLeaveInlined(state->entry(), argument_count),
260 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
261 last_environment()->Push(return_value);
263 HGoto* instr =
new(zone()) HGoto(target);
264 Finish(instr, position);
268 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
269 ASSERT(!HasEnvironment());
271 UpdateEnvironment(env);
275 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
276 last_environment_ = env;
277 graph()->update_maximum_environment_size(env->first_expression_index());
281 void HBasicBlock::SetJoinId(BailoutId ast_id) {
282 int length = predecessors_.length();
284 for (
int i = 0; i < length; i++) {
285 HBasicBlock* predecessor = predecessors_[i];
286 ASSERT(predecessor->end()->IsGoto());
287 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
289 (predecessor->last_environment()->closure().is_null() ||
290 predecessor->last_environment()->closure()->shared()
291 ->VerifyBailoutId(ast_id)));
292 simulate->set_ast_id(ast_id);
293 predecessor->last_environment()->set_ast_id(ast_id);
298 bool HBasicBlock::Dominates(HBasicBlock* other)
const {
299 HBasicBlock* current = other->dominator();
300 while (current !=
NULL) {
301 if (current ==
this)
return true;
302 current = current->dominator();
308 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other)
const {
309 if (
this == other)
return true;
310 return Dominates(other);
314 int HBasicBlock::LoopNestingDepth()
const {
315 const HBasicBlock* current =
this;
316 int result = (current->IsLoopHeader()) ? 1 : 0;
317 while (current->parent_loop_header() !=
NULL) {
318 current = current->parent_loop_header();
325 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
328 SetJoinId(stmt->EntryId());
329 if (predecessors()->length() == 1) {
331 DetachLoopInformation();
337 for (
int i = 1; i < predecessors()->length(); ++i) {
338 loop_information()->RegisterBackEdge(predecessors()->at(i));
343 void HBasicBlock::MarkSuccEdgeUnreachable(
int succ) {
345 HBasicBlock* succ_block = end()->SuccessorAt(succ);
347 ASSERT(succ_block->predecessors()->length() == 1);
348 succ_block->MarkUnreachable();
352 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
353 if (HasPredecessor()) {
358 HEnvironment* incoming_env = pred->last_environment();
359 if (IsLoopHeader()) {
360 ASSERT(phis()->length() == incoming_env->length());
361 for (
int i = 0; i < phis_.length(); ++i) {
362 phis_[i]->AddInput(incoming_env->values()->at(i));
365 last_environment()->AddIncomingEdge(
this, pred->last_environment());
367 }
else if (!HasEnvironment() && !IsFinished()) {
369 SetInitialEnvironment(pred->last_environment()->Copy());
372 predecessors_.Add(pred, zone());
376 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
377 ASSERT(!dominated_blocks_.Contains(block));
381 while (index < dominated_blocks_.length() &&
382 dominated_blocks_[index]->block_id() < block->block_id()) {
385 dominated_blocks_.InsertAt(index, block, zone());
389 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
390 if (dominator_ ==
NULL) {
392 other->AddDominatedBlock(
this);
393 }
else if (other->dominator() !=
NULL) {
394 HBasicBlock* first = dominator_;
395 HBasicBlock* second = other;
397 while (first != second) {
398 if (first->block_id() > second->block_id()) {
399 first = first->dominator();
401 second = second->dominator();
406 if (dominator_ != first) {
407 ASSERT(dominator_->dominated_blocks_.Contains(
this));
408 dominator_->dominated_blocks_.RemoveElement(
this);
410 first->AddDominatedBlock(
this);
416 void HBasicBlock::AssignLoopSuccessorDominators() {
426 HBasicBlock* last = loop_information()->GetLastBackEdge();
427 int outstanding_successors = 1;
429 MarkAsLoopSuccessorDominator();
430 for (
int j = block_id(); j <= last->block_id(); ++j) {
431 HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
432 for (HPredecessorIterator it(dominator_candidate); !it.Done();
434 HBasicBlock* predecessor = it.Current();
436 if (predecessor->block_id() < dominator_candidate->block_id()) {
437 outstanding_successors--;
449 ASSERT(outstanding_successors >= 0);
450 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
451 if (outstanding_successors == 0 &&
452 (parent_loop_header ==
this && !dominator_candidate->IsLoopHeader())) {
453 dominator_candidate->MarkAsLoopSuccessorDominator();
455 HControlInstruction* end = dominator_candidate->end();
456 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
457 HBasicBlock* successor = it.Current();
460 if (successor->block_id() > dominator_candidate->block_id() &&
461 successor->block_id() <= last->block_id()) {
463 ASSERT(successor->block_id() > dominator_candidate->block_id() ||
464 successor->IsLoopHeader());
465 outstanding_successors++;
472 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor)
const {
473 for (
int i = 0; i < predecessors_.length(); ++i) {
474 if (predecessors_[i] == predecessor)
return i;
482 void HBasicBlock::Verify() {
488 if (predecessors_.length() > 1) {
489 for (
int i = 0; i < predecessors_.length(); ++i) {
490 ASSERT(predecessors_[i]->end()->SecondSuccessor() ==
NULL);
497 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
498 this->back_edges_.Add(block, block->zone());
503 HBasicBlock* HLoopInformation::GetLastBackEdge()
const {
505 HBasicBlock* result =
NULL;
506 for (
int i = 0; i < back_edges_.length(); ++i) {
507 HBasicBlock* cur = back_edges_[i];
508 if (cur->block_id() > max_id) {
509 max_id = cur->block_id();
517 void HLoopInformation::AddBlock(HBasicBlock* block) {
518 if (block == loop_header())
return;
519 if (block->parent_loop_header() == loop_header())
return;
520 if (block->parent_loop_header() !=
NULL) {
521 AddBlock(block->parent_loop_header());
523 block->set_parent_loop_header(loop_header());
524 blocks_.Add(block, block->zone());
525 for (
int i = 0; i < block->predecessors()->length(); ++i) {
526 AddBlock(block->predecessors()->at(i));
541 ReachabilityAnalyzer(HBasicBlock* entry_block,
543 HBasicBlock* dont_visit)
545 stack_(16, entry_block->zone()),
546 reachable_(block_count, entry_block->zone()),
547 dont_visit_(dont_visit) {
548 PushBlock(entry_block);
552 int visited_count()
const {
return visited_count_; }
553 const BitVector* reachable()
const {
return &reachable_; }
556 void PushBlock(HBasicBlock* block) {
557 if (block !=
NULL && block != dont_visit_ &&
558 !reachable_.Contains(block->block_id())) {
559 reachable_.Add(block->block_id());
560 stack_.Add(block, block->zone());
566 while (!stack_.is_empty()) {
567 HControlInstruction* end = stack_.RemoveLast()->end();
568 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
569 PushBlock(it.Current());
575 ZoneList<HBasicBlock*> stack_;
576 BitVector reachable_;
577 HBasicBlock* dont_visit_;
581 void HGraph::Verify(
bool do_full_verify)
const {
582 Heap::RelocationLock relocation_lock(isolate()->heap());
585 for (
int i = 0; i < blocks_.length(); i++) {
586 HBasicBlock* block = blocks_.at(i);
592 HInstruction* current = block->first();
593 ASSERT(current !=
NULL && current->IsBlockEntry());
594 while (current !=
NULL) {
595 ASSERT((current->next() ==
NULL) == current->IsControlInstruction());
596 ASSERT(current->block() == block);
598 current = current->next();
602 HBasicBlock* first = block->end()->FirstSuccessor();
603 HBasicBlock* second = block->end()->SecondSuccessor();
608 ASSERT(first->predecessors()->Contains(block));
609 if (second !=
NULL) {
610 ASSERT(second->predecessors()->Contains(block));
615 for (
int j = 0; j < block->phis()->length(); j++) {
616 HPhi* phi = block->phis()->at(j);
622 if (block->predecessors()->length() >= 2) {
624 block->predecessors()->first()->last_environment()->ast_id();
625 for (
int k = 0; k < block->predecessors()->length(); k++) {
626 HBasicBlock* predecessor = block->predecessors()->at(k);
627 ASSERT(predecessor->end()->IsGoto() ||
628 predecessor->end()->IsDeoptimize());
629 ASSERT(predecessor->last_environment()->ast_id() == id);
635 ASSERT(blocks_.at(0)->predecessors()->is_empty());
637 if (do_full_verify) {
639 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(),
NULL);
640 ASSERT(analyzer.visited_count() == blocks_.length());
646 for (
int i = 0; i < blocks_.length(); ++i) {
647 HBasicBlock* block = blocks_.at(i);
648 if (block->dominator() ==
NULL) {
653 ReachabilityAnalyzer dominator_analyzer(entry_block_,
656 ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
665 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
667 if (!pointer->is_set()) {
670 HConstant* constant = HConstant::New(zone(),
NULL, value);
671 constant->InsertAfter(entry_block()->first());
672 pointer->set(constant);
675 return ReinsertConstantIfNecessary(pointer->get());
679 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
680 if (!constant->IsLinked()) {
683 constant->InsertAfter(entry_block()->first());
689 HConstant* HGraph::GetConstant0() {
690 return GetConstant(&constant_0_, 0);
694 HConstant* HGraph::GetConstant1() {
695 return GetConstant(&constant_1_, 1);
699 HConstant* HGraph::GetConstantMinus1() {
700 return GetConstant(&constant_minus1_, -1);
704 #define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
705 HConstant* HGraph::GetConstant##Name() { \
706 if (!constant_##name##_.is_set()) { \
707 HConstant* constant = new(zone()) HConstant( \
708 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
709 Representation::Tagged(), \
715 constant->InsertAfter(entry_block()->first()); \
716 constant_##name##_.set(constant); \
718 return ReinsertConstantIfNecessary(constant_##name##_.get()); \
729 #undef DEFINE_GET_CONSTANT
731 #define DEFINE_IS_CONSTANT(Name, name) \
732 bool HGraph::IsConstant##Name(HConstant* constant) { \
733 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
737 DEFINE_IS_CONSTANT(1, 1)
738 DEFINE_IS_CONSTANT(Minus1, minus1)
739 DEFINE_IS_CONSTANT(True, true)
740 DEFINE_IS_CONSTANT(False, false)
741 DEFINE_IS_CONSTANT(Hole, the_hole)
742 DEFINE_IS_CONSTANT(Null, null)
744 #undef DEFINE_IS_CONSTANT
747 HConstant* HGraph::GetInvalidContext() {
748 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
752 bool HGraph::IsStandardConstant(HConstant* constant) {
753 if (IsConstantUndefined(constant))
return true;
754 if (IsConstant0(constant))
return true;
755 if (IsConstant1(constant))
return true;
756 if (IsConstantMinus1(constant))
return true;
757 if (IsConstantTrue(constant))
return true;
758 if (IsConstantFalse(constant))
return true;
759 if (IsConstantHole(constant))
return true;
760 if (IsConstantNull(constant))
return true;
765 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
774 needs_compare_(true),
775 pending_merge_block_(false),
776 split_edge_merge_block_(
NULL),
777 merge_at_join_blocks_(
NULL),
778 normal_merge_at_join_block_count_(0),
779 deopt_merge_at_join_block_count_(0) {
780 HEnvironment* env = builder->environment();
781 first_true_block_ = builder->CreateBasicBlock(env->Copy());
782 first_false_block_ = builder->CreateBasicBlock(env->Copy());
786 HGraphBuilder::IfBuilder::IfBuilder(
787 HGraphBuilder* builder,
788 HIfContinuation* continuation)
797 needs_compare_(false),
798 pending_merge_block_(false),
799 first_true_block_(
NULL),
800 first_false_block_(
NULL),
801 split_edge_merge_block_(
NULL),
802 merge_at_join_blocks_(
NULL),
803 normal_merge_at_join_block_count_(0),
804 deopt_merge_at_join_block_count_(0) {
805 continuation->Continue(&first_true_block_,
806 &first_false_block_);
810 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
811 HControlInstruction* compare) {
812 ASSERT(did_then_ == did_else_);
820 pending_merge_block_ =
false;
821 split_edge_merge_block_ =
NULL;
822 HEnvironment* env = builder_->environment();
823 first_true_block_ = builder_->CreateBasicBlock(env->Copy());
824 first_false_block_ = builder_->CreateBasicBlock(env->Copy());
826 if (split_edge_merge_block_ !=
NULL) {
827 HEnvironment* env = first_false_block_->last_environment();
828 HBasicBlock* split_edge =
829 builder_->CreateBasicBlock(env->Copy());
831 compare->SetSuccessorAt(0, split_edge);
832 compare->SetSuccessorAt(1, first_false_block_);
834 compare->SetSuccessorAt(0, first_true_block_);
835 compare->SetSuccessorAt(1, split_edge);
837 builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
839 compare->SetSuccessorAt(0, first_true_block_);
840 compare->SetSuccessorAt(1, first_false_block_);
842 builder_->FinishCurrentBlock(compare);
843 needs_compare_ =
false;
848 void HGraphBuilder::IfBuilder::Or() {
852 HEnvironment* env = first_false_block_->last_environment();
853 if (split_edge_merge_block_ ==
NULL) {
854 split_edge_merge_block_ =
855 builder_->CreateBasicBlock(env->Copy());
856 builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
857 first_true_block_ = split_edge_merge_block_;
859 builder_->set_current_block(first_false_block_);
860 first_false_block_ = builder_->CreateBasicBlock(env->Copy());
864 void HGraphBuilder::IfBuilder::And() {
868 HEnvironment* env = first_false_block_->last_environment();
869 if (split_edge_merge_block_ ==
NULL) {
870 split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
871 builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
872 first_false_block_ = split_edge_merge_block_;
874 builder_->set_current_block(first_true_block_);
875 first_true_block_ = builder_->CreateBasicBlock(env->Copy());
879 void HGraphBuilder::IfBuilder::CaptureContinuation(
880 HIfContinuation* continuation) {
885 HBasicBlock* true_block =
NULL;
886 HBasicBlock* false_block =
NULL;
887 Finish(&true_block, &false_block);
890 continuation->Capture(true_block, false_block);
892 builder_->set_current_block(
NULL);
897 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
901 HBasicBlock* true_block =
NULL;
902 HBasicBlock* false_block =
NULL;
903 Finish(&true_block, &false_block);
904 merge_at_join_blocks_ =
NULL;
905 if (true_block !=
NULL && !true_block->IsFinished()) {
906 ASSERT(continuation->IsTrueReachable());
907 builder_->GotoNoSimulate(true_block, continuation->true_branch());
909 if (false_block !=
NULL && !false_block->IsFinished()) {
910 ASSERT(continuation->IsFalseReachable());
911 builder_->GotoNoSimulate(false_block, continuation->false_branch());
918 void HGraphBuilder::IfBuilder::Then() {
922 if (needs_compare_) {
927 HConstant* constant_false = builder_->graph()->GetConstantFalse();
928 ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
929 boolean_type.Add(ToBooleanStub::BOOLEAN);
930 HBranch* branch = builder()->New<HBranch>(
931 constant_false, boolean_type, first_true_block_, first_false_block_);
932 builder_->FinishCurrentBlock(branch);
934 builder_->set_current_block(first_true_block_);
935 pending_merge_block_ =
true;
939 void HGraphBuilder::IfBuilder::Else() {
943 AddMergeAtJoinBlock(
false);
944 builder_->set_current_block(first_false_block_);
945 pending_merge_block_ =
true;
950 void HGraphBuilder::IfBuilder::Deopt(
const char* reason) {
952 builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
953 AddMergeAtJoinBlock(
true);
957 void HGraphBuilder::IfBuilder::Return(HValue* value) {
958 HValue* parameter_count = builder_->graph()->GetConstantMinus1();
959 builder_->FinishExitCurrentBlock(
960 builder_->New<HReturn>(value, parameter_count));
961 AddMergeAtJoinBlock(
false);
965 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(
bool deopt) {
966 if (!pending_merge_block_)
return;
967 HBasicBlock* block = builder_->current_block();
968 ASSERT(block ==
NULL || !block->IsFinished());
969 MergeAtJoinBlock* record =
970 new(builder_->zone()) MergeAtJoinBlock(block, deopt,
971 merge_at_join_blocks_);
972 merge_at_join_blocks_ = record;
976 normal_merge_at_join_block_count_++;
978 deopt_merge_at_join_block_count_++;
981 builder_->set_current_block(
NULL);
982 pending_merge_block_ =
false;
986 void HGraphBuilder::IfBuilder::Finish() {
991 AddMergeAtJoinBlock(
false);
994 AddMergeAtJoinBlock(
false);
1000 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
1001 HBasicBlock** else_continuation) {
1004 MergeAtJoinBlock* else_record = merge_at_join_blocks_;
1005 if (else_continuation !=
NULL) {
1006 *else_continuation = else_record->block_;
1008 MergeAtJoinBlock* then_record = else_record->next_;
1009 if (then_continuation !=
NULL) {
1010 *then_continuation = then_record->block_;
1016 void HGraphBuilder::IfBuilder::End() {
1017 if (captured_)
return;
1020 int total_merged_blocks = normal_merge_at_join_block_count_ +
1021 deopt_merge_at_join_block_count_;
1022 ASSERT(total_merged_blocks >= 1);
1023 HBasicBlock* merge_block = total_merged_blocks == 1
1024 ?
NULL : builder_->graph()->CreateBasicBlock();
1028 MergeAtJoinBlock* current = merge_at_join_blocks_;
1029 while (current !=
NULL) {
1030 if (!current->deopt_ && current->block_ !=
NULL) {
1034 if (total_merged_blocks == 1) {
1035 builder_->set_current_block(current->block_);
1038 builder_->GotoNoSimulate(current->block_, merge_block);
1040 current = current->next_;
1044 current = merge_at_join_blocks_;
1045 while (current !=
NULL) {
1046 if (current->deopt_ && current->block_ !=
NULL) {
1047 current->block_->FinishExit(
1048 HAbnormalExit::New(builder_->zone(),
NULL),
1049 HSourcePosition::Unknown());
1051 current = current->next_;
1053 builder_->set_current_block(merge_block);
1057 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1059 LoopBuilder::Direction direction)
1060 : builder_(builder),
1062 direction_(direction),
1064 header_block_ = builder->CreateLoopHeaderBlock();
1067 exit_trampoline_block_ =
NULL;
1068 increment_amount_ = builder_->graph()->GetConstant1();
1072 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1074 LoopBuilder::Direction direction,
1075 HValue* increment_amount)
1076 : builder_(builder),
1078 direction_(direction),
1080 header_block_ = builder->CreateLoopHeaderBlock();
1083 exit_trampoline_block_ =
NULL;
1084 increment_amount_ = increment_amount;
1088 HValue* HGraphBuilder::LoopBuilder::BeginBody(
1090 HValue* terminating,
1091 Token::Value token) {
1092 HEnvironment* env = builder_->environment();
1093 phi_ = header_block_->AddNewPhi(env->values()->length());
1094 phi_->AddInput(initial);
1096 builder_->GotoNoSimulate(header_block_);
1098 HEnvironment* body_env = env->Copy();
1099 HEnvironment* exit_env = env->Copy();
1103 body_block_ = builder_->CreateBasicBlock(body_env);
1104 exit_block_ = builder_->CreateBasicBlock(exit_env);
1106 builder_->set_current_block(header_block_);
1108 builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1109 phi_, terminating, token, body_block_, exit_block_));
1111 builder_->set_current_block(body_block_);
1112 if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1113 HValue* one = builder_->graph()->GetConstant1();
1114 if (direction_ == kPreIncrement) {
1115 increment_ = HAdd::New(zone(), context_, phi_, one);
1117 increment_ = HSub::New(zone(), context_, phi_, one);
1119 increment_->ClearFlag(HValue::kCanOverflow);
1120 builder_->AddInstruction(increment_);
1129 if (exit_trampoline_block_ ==
NULL) {
1131 HEnvironment* env = exit_block_->last_environment()->Copy();
1132 exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1133 builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1136 builder_->GotoNoSimulate(exit_trampoline_block_);
1137 builder_->set_current_block(
NULL);
1141 void HGraphBuilder::LoopBuilder::EndBody() {
1144 if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1145 if (direction_ == kPostIncrement) {
1146 increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
1148 increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
1150 increment_->ClearFlag(HValue::kCanOverflow);
1151 builder_->AddInstruction(increment_);
1155 builder_->environment()->Push(increment_);
1156 HBasicBlock* last_block = builder_->current_block();
1157 builder_->GotoNoSimulate(last_block, header_block_);
1158 header_block_->loop_information()->RegisterBackEdge(last_block);
1160 if (exit_trampoline_block_ !=
NULL) {
1161 builder_->set_current_block(exit_trampoline_block_);
1163 builder_->set_current_block(exit_block_);
1169 HGraph* HGraphBuilder::CreateGraph() {
1170 graph_ =
new(zone()) HGraph(info_);
1171 if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1172 CompilationPhase phase(
"H_Block building", info_);
1173 set_current_block(graph()->entry_block());
1174 if (!BuildGraph())
return NULL;
1175 graph()->FinalizeUniqueness();
1182 ASSERT(!FLAG_hydrogen_track_positions ||
1183 !position_.IsUnknown() ||
1184 !info_->IsOptimizing());
1185 current_block()->AddInstruction(instr, source_position());
1186 if (graph()->IsInsideNoSideEffectsScope()) {
1187 instr->
SetFlag(HValue::kHasNoObservableSideEffects);
1194 ASSERT(!FLAG_hydrogen_track_positions ||
1195 !info_->IsOptimizing() ||
1196 !position_.IsUnknown());
1197 current_block()->Finish(last, source_position());
1198 if (last->IsReturn() || last->IsAbnormalExit()) {
1199 set_current_block(
NULL);
1205 ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1206 !position_.IsUnknown());
1207 current_block()->FinishExit(instruction, source_position());
1208 if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1209 set_current_block(
NULL);
1215 if (FLAG_native_code_counters && counter->
Enabled()) {
1216 HValue* reference = Add<HConstant>(ExternalReference(counter));
1217 HValue* old_value = Add<HLoadNamedField>(
1218 reference,
static_cast<HValue*
>(
NULL), HObjectAccess::ForCounter());
1219 HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1220 new_value->
ClearFlag(HValue::kCanOverflow);
1221 Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1230 ASSERT(!graph()->IsInsideNoSideEffectsScope());
1231 current_block()->AddNewSimulate(
id, source_position(), removable);
1235 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1236 HBasicBlock* b = graph()->CreateBasicBlock();
1237 b->SetInitialEnvironment(env);
1242 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1243 HBasicBlock* header = graph()->CreateBasicBlock();
1244 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1245 header->SetInitialEnvironment(entry_env);
1246 header->AttachLoopInformation();
1252 if (obj->
type().IsHeapObject())
return obj;
1253 return Add<HCheckHeapObject>(
obj);
1257 void HGraphBuilder::FinishExitWithHardDeoptimization(
const char* reason) {
1258 Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1259 FinishExitCurrentBlock(New<HAbnormalExit>());
1264 return Add<HCheckMaps>(
obj,
map, top_info());
1269 if (!string->
type().IsString()) {
1270 ASSERT(!string->IsConstant() ||
1271 !HConstant::cast(
string)->HasStringValue());
1272 BuildCheckHeapObject(
string);
1273 return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1280 if (object->
type().IsJSObject())
return object;
1281 if (function->IsConstant() &&
1282 HConstant::cast(
function)->handle(isolate())->IsJSFunction()) {
1284 HConstant::cast(
function)->
handle(isolate()));
1288 return Add<HWrapReceiver>(object,
function);
1292 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1300 IfBuilder length_checker(
this);
1305 length_checker.Then();
1307 HValue* current_capacity = AddLoadFixedArrayLength(elements);
1309 IfBuilder capacity_checker(
this);
1313 capacity_checker.Then();
1315 HValue* max_gap = Add<HConstant>(
static_cast<int32_t>(JSObject::kMaxGap));
1316 HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1317 IfBuilder key_checker(
this);
1320 key_checker.ElseDeopt(
"Key out of capacity range");
1323 HValue* new_capacity = BuildNewElementsCapacity(key);
1324 HValue* new_elements = BuildGrowElementsCapacity(
object, elements,
1328 environment()->Push(new_elements);
1329 capacity_checker.Else();
1331 environment()->Push(elements);
1332 capacity_checker.End();
1335 HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1336 new_length->
ClearFlag(HValue::kCanOverflow);
1338 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1343 HValue* checked_elements = environment()->Top();
1346 Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1349 length_checker.Else();
1350 Add<HBoundsCheck>(key, length);
1352 environment()->Push(elements);
1353 length_checker.End();
1355 return environment()->Pop();
1363 Factory* factory = isolate()->factory();
1365 IfBuilder cow_checker(
this);
1367 cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1370 HValue* capacity = AddLoadFixedArrayLength(elements);
1372 HValue* new_elements = BuildGrowElementsCapacity(
object, elements, kind,
1373 kind, length, capacity);
1375 environment()->Push(new_elements);
1379 environment()->Push(elements);
1383 return environment()->Pop();
1387 void HGraphBuilder::BuildTransitionElementsKind(
HValue*
object,
1396 Add<HTrapAllocationMemento>(object);
1403 isolate()->factory()->empty_fixed_array());
1405 IfBuilder if_builder(
this);
1411 HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1414 ? Add<HLoadNamedField>(object,
static_cast<HValue*
>(
NULL),
1415 HObjectAccess::ForArrayLength(from_kind))
1418 BuildGrowElementsCapacity(
object, elements, from_kind, to_kind,
1419 array_length, elements_length);
1424 Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
map);
1428 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
1433 int current_probe) {
1434 if (current_probe == kNumberDictionaryProbes) {
1438 int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
1439 HValue* raw_index = (current_probe == 0)
1441 : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
1442 raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
1443 int32_t entry_size = SeededNumberDictionary::kEntrySize;
1444 raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
1445 raw_index->ClearFlag(HValue::kCanOverflow);
1447 int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
1448 HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
1449 key_index->ClearFlag(HValue::kCanOverflow);
1451 HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
1452 static_cast<HValue*
>(
NULL),
1455 IfBuilder key_compare(
this);
1456 key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
1460 HValue* result = BuildUncheckedDictionaryElementLoadHelper(
1461 elements, key, hash, mask, current_probe + 1);
1462 if (result ==
NULL) {
1463 key_compare.Deopt(
"probes exhausted in keyed load dictionary lookup");
1464 result = graph()->GetConstantUndefined();
1473 HValue* details_index = AddUncasted<HAdd>(
1474 raw_index, Add<HConstant>(base_offset + 2));
1475 details_index->ClearFlag(HValue::kCanOverflow);
1477 HValue* details = Add<HLoadKeyed>(elements, details_index,
1478 static_cast<HValue*
>(
NULL),
1480 IfBuilder details_compare(
this);
1481 details_compare.If<HCompareNumericAndBranch>(details,
1482 graph()->GetConstant0(),
1484 details_compare.ThenDeopt(
"keyed load dictionary element not fast case");
1486 details_compare.Else();
1490 HValue* result_index = AddUncasted<HAdd>(
1491 raw_index, Add<HConstant>(base_offset + 1));
1492 result_index->ClearFlag(HValue::kCanOverflow);
1494 Push(Add<HLoadKeyed>(elements, result_index,
1495 static_cast<HValue*>(
NULL),
1498 details_compare.End();
1507 int32_t seed_value =
static_cast<uint32_t
>(isolate()->heap()->HashSeed());
1508 HValue* seed = Add<HConstant>(seed_value);
1509 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1512 HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1513 HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1514 graph()->GetConstantMinus1());
1515 hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1518 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1519 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1522 shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1523 hash = AddUncasted<HAdd>(hash, shifted_hash);
1526 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1527 hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1530 hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1531 hash->ClearFlag(HValue::kCanOverflow);
1534 shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1535 return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1539 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(
HValue* receiver,
1541 HValue* elements = AddLoadElements(receiver);
1543 HValue* hash = BuildElementIndexHash(key);
1545 HValue* capacity = Add<HLoadKeyed>(
1547 Add<HConstant>(NameDictionary::kCapacityIndex),
1548 static_cast<HValue*>(
NULL),
1551 HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1553 mask->ClearFlag(HValue::kCanOverflow);
1555 return BuildUncheckedDictionaryElementLoadHelper(elements, key,
1563 NoObservableSideEffectsScope scope(
this);
1568 size = AddUncasted<HAdd>(
size, Add<HConstant>(
static_cast<int32_t>(
1569 JSRegExpResult::kSize + FixedArray::kHeaderSize)));
1572 Add<HBoundsCheck>(
size, Add<HConstant>(Page::kMaxRegularHeapObjectSize));
1575 HValue* result = Add<HAllocate>(
1579 HValue* elements = Add<HInnerAllocatedObject>(
1580 result, Add<HConstant>(JSRegExpResult::kSize));
1583 HValue* global_object = Add<HLoadNamedField>(
1585 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1586 HValue* native_context = Add<HLoadNamedField>(
1588 HObjectAccess::ForGlobalObjectNativeContext());
1589 AddStoreMapNoWriteBarrier(result, Add<HLoadNamedField>(
1590 native_context, static_cast<HValue*>(
NULL),
1591 HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1592 Add<HStoreNamedField>(
1593 result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1594 Add<HConstant>(isolate()->factory()->empty_fixed_array()));
1595 Add<HStoreNamedField>(
1596 result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1598 Add<HStoreNamedField>(
1599 result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1602 Add<HStoreNamedField>(
1603 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1605 Add<HStoreNamedField>(
1606 result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1610 AddStoreMapConstantNoWriteBarrier(elements,
1611 isolate()->factory()->fixed_array_map());
1612 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), length);
1615 LoopBuilder loop(
this, context(), LoopBuilder::kPostIncrement);
1616 index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1618 Add<HStoreKeyed>(elements, index, graph()->GetConstantUndefined(),
1628 NoObservableSideEffectsScope scope(
this);
1631 if (object->IsConstant() && HConstant::cast(
object)->HasNumberValue()) {
1632 Handle<Object> number = HConstant::cast(
object)->handle(isolate());
1633 Handle<String> result = isolate()->factory()->NumberToString(number);
1634 return Add<HConstant>(result);
1638 HIfContinuation found(graph()->CreateBasicBlock(),
1639 graph()->CreateBasicBlock());
1642 HValue* number_string_cache =
1643 Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1647 HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1648 mask->set_type(HType::Smi());
1649 mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1650 mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1653 IfBuilder if_objectissmi(
this);
1654 if_objectissmi.If<HIsSmiAndBranch>(object);
1655 if_objectissmi.Then();
1658 HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
1661 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1662 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1667 IfBuilder if_objectiskey(
this);
1669 if_objectiskey.Then();
1674 if_objectiskey.JoinContinuation(&found);
1676 if_objectissmi.Else();
1678 if (type->
Is(Type::SignedSmall())) {
1679 if_objectissmi.Deopt(
"Expected smi");
1682 IfBuilder if_objectisnumber(
this);
1683 HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1684 object, isolate()->factory()->heap_number_map());
1685 if_objectisnumber.Then();
1688 HValue* low = Add<HLoadNamedField>(
1689 object, objectisnumber,
1690 HObjectAccess::ForHeapNumberValueLowestBits());
1691 HValue* high = Add<HLoadNamedField>(
1692 object, objectisnumber,
1693 HObjectAccess::ForHeapNumberValueHighestBits());
1694 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1695 hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1698 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1699 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1705 IfBuilder if_keyisnotsmi(
this);
1706 HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1707 if_keyisnotsmi.Then();
1710 IfBuilder if_keyeqobject(
this);
1712 Add<HLoadNamedField>(key, keyisnotsmi,
1713 HObjectAccess::ForHeapNumberValue()),
1714 Add<HLoadNamedField>(
object, objectisnumber,
1715 HObjectAccess::ForHeapNumberValue()),
1717 if_keyeqobject.Then();
1722 if_keyeqobject.JoinContinuation(&found);
1724 if_keyisnotsmi.JoinContinuation(&found);
1726 if_objectisnumber.Else();
1728 if (type->
Is(Type::Number())) {
1729 if_objectisnumber.Deopt(
"Expected heap number");
1732 if_objectisnumber.JoinContinuation(&found);
1735 if_objectissmi.JoinContinuation(&found);
1738 IfBuilder if_found(
this, &found);
1742 AddIncrementCounter(isolate()->counters()->number_to_string_native());
1745 HValue* key_index = Pop();
1746 HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
1747 Push(Add<HLoadKeyed>(number_string_cache, value_index,
1748 static_cast<HValue*>(
NULL),
1754 Add<HPushArgument>(object);
1755 Push(Add<HCallRuntime>(
1756 isolate()->factory()->empty_string(),
1757 Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
1766 HAllocate* HGraphBuilder::BuildAllocate(
1770 HAllocationMode allocation_mode) {
1773 if (allocation_mode.CreateAllocationMementos()) {
1774 size = AddUncasted<HAdd>(
size, Add<HConstant>(AllocationMemento::kSize));
1775 size->ClearFlag(HValue::kCanOverflow);
1779 HAllocate*
object = Add<HAllocate>(
1780 size, type, allocation_mode.GetPretenureMode(),
1781 instance_type, allocation_mode.feedback_site());
1784 if (allocation_mode.CreateAllocationMementos()) {
1785 BuildCreateAllocationMemento(
1786 object, object_size, allocation_mode.current_site());
1796 HValue* length = AddUncasted<HAdd>(left_length, right_length);
1797 HValue* max_length = Add<HConstant>(String::kMaxLength);
1798 Add<HBoundsCheck>(length, max_length);
1803 HValue* HGraphBuilder::BuildCreateConsString(
1807 HAllocationMode allocation_mode) {
1809 HInstruction* left_instance_type = AddLoadStringInstanceType(left);
1810 HInstruction* right_instance_type = AddLoadStringInstanceType(right);
1818 HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
1823 HValue* anded_instance_types = AddUncasted<HBitwise>(
1824 Token::BIT_AND, left_instance_type, right_instance_type);
1825 HValue* xored_instance_types = AddUncasted<HBitwise>(
1826 Token::BIT_XOR, left_instance_type, right_instance_type);
1837 IfBuilder if_onebyte(
this);
1841 AddUncasted<HBitwise>(
1842 Token::BIT_AND, anded_instance_types,
1843 Add<HConstant>(
static_cast<int32_t>(
1845 graph()->GetConstant0(), Token::NE);
1851 AddUncasted<HBitwise>(
1852 Token::BIT_AND, xored_instance_types,
1853 Add<HConstant>(
static_cast<int32_t>(
1855 Add<HConstant>(static_cast<int32_t>(
1861 AddStoreMapConstantNoWriteBarrier(result, map);
1867 AddStoreMapConstantNoWriteBarrier(result, map);
1872 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
1873 Add<HConstant>(String::kEmptyHashField));
1874 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
1875 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
1876 Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
1879 AddIncrementCounter(isolate()->counters()->string_add_native());
1885 void HGraphBuilder::BuildCopySeqStringChars(
HValue* src,
1892 ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
1893 src_encoding == String::ONE_BYTE_ENCODING);
1894 LoopBuilder loop(
this, context(), LoopBuilder::kPostIncrement);
1895 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
1897 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
1899 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
1900 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
1901 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
1907 HValue* HGraphBuilder::BuildObjectSizeAlignment(
1908 HValue* unaligned_size,
int header_size) {
1911 unaligned_size, Add<HConstant>(
static_cast<int32_t>(
1914 return AddUncasted<HBitwise>(
1915 Token::BIT_AND,
size, Add<HConstant>(
static_cast<int32_t>(
1916 ~kObjectAlignmentMask)));
1920 HValue* HGraphBuilder::BuildUncheckedStringAdd(
1923 HAllocationMode allocation_mode) {
1925 HValue* left_length = AddLoadStringLength(left);
1926 HValue* right_length = AddLoadStringLength(right);
1929 HValue* length = BuildAddStringLengths(left_length, right_length);
1932 if (left_length->IsConstant()) {
1933 HConstant* c_left_length = HConstant::cast(left_length);
1934 ASSERT_NE(0, c_left_length->Integer32Value());
1935 if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1937 return BuildCreateConsString(length, left, right, allocation_mode);
1939 }
else if (right_length->IsConstant()) {
1940 HConstant* c_right_length = HConstant::cast(right_length);
1941 ASSERT_NE(0, c_right_length->Integer32Value());
1942 if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
1944 return BuildCreateConsString(length, left, right, allocation_mode);
1949 IfBuilder if_createcons(
this);
1951 length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
1952 if_createcons.Then();
1955 Push(BuildCreateConsString(length, left, right, allocation_mode));
1957 if_createcons.Else();
1960 HValue* left_instance_type = AddLoadStringInstanceType(left);
1961 HValue* right_instance_type = AddLoadStringInstanceType(right);
1964 HValue* ored_instance_types = AddUncasted<HBitwise>(
1965 Token::BIT_OR, left_instance_type, right_instance_type);
1966 HValue* xored_instance_types = AddUncasted<HBitwise>(
1967 Token::BIT_XOR, left_instance_type, right_instance_type);
1971 IfBuilder if_sameencodingandsequential(
this);
1973 AddUncasted<HBitwise>(
1974 Token::BIT_AND, xored_instance_types,
1977 if_sameencodingandsequential.And();
1980 AddUncasted<HBitwise>(
1981 Token::BIT_AND, ored_instance_types,
1984 if_sameencodingandsequential.Then();
1986 HConstant* string_map =
1987 Add<HConstant>(isolate()->factory()->string_map());
1988 HConstant* ascii_string_map =
1989 Add<HConstant>(isolate()->factory()->ascii_string_map());
1992 IfBuilder if_onebyte(
this);
1995 AddUncasted<HBitwise>(
1996 Token::BIT_AND, ored_instance_types,
1998 graph()->GetConstant0(), Token::NE);
2003 Push(ascii_string_map);
2008 HValue*
size = AddUncasted<HShl>(length, graph()->GetConstant1());
2010 size->
SetFlag(HValue::kUint32);
2020 HValue*
size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2024 HAllocate* result = BuildAllocate(
2025 size, HType::String(),
STRING_TYPE, allocation_mode);
2028 AddStoreMapNoWriteBarrier(result, map);
2031 Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2032 Add<HConstant>(String::kEmptyHashField));
2033 Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2036 IfBuilder if_twobyte(
this);
2041 BuildCopySeqStringChars(
2042 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2043 result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2047 BuildCopySeqStringChars(
2048 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2049 result, left_length, String::TWO_BYTE_ENCODING,
2055 BuildCopySeqStringChars(
2056 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2057 result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2061 BuildCopySeqStringChars(
2062 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2063 result, left_length, String::ONE_BYTE_ENCODING,
2069 AddIncrementCounter(isolate()->counters()->string_add_native());
2074 if_sameencodingandsequential.Else();
2077 Add<HPushArgument>(left);
2078 Add<HPushArgument>(right);
2079 Push(Add<HCallRuntime>(
2080 isolate()->factory()->empty_string(),
2081 Runtime::FunctionForId(Runtime::kHiddenStringAdd),
2084 if_sameencodingandsequential.End();
2086 if_createcons.End();
2095 HAllocationMode allocation_mode) {
2096 NoObservableSideEffectsScope no_effects(
this);
2099 HValue* left_length = AddLoadStringLength(left);
2100 HValue* right_length = AddLoadStringLength(right);
2103 IfBuilder if_leftempty(
this);
2105 left_length, graph()->GetConstant0(),
Token::EQ);
2106 if_leftempty.Then();
2109 AddIncrementCounter(isolate()->counters()->string_add_native());
2114 if_leftempty.Else();
2117 IfBuilder if_rightempty(
this);
2119 right_length, graph()->GetConstant0(),
Token::EQ);
2120 if_rightempty.Then();
2123 AddIncrementCounter(isolate()->counters()->string_add_native());
2128 if_rightempty.Else();
2131 Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2133 if_rightempty.End();
2166 HValue* elements = AddLoadElements(checked_object);
2167 if (access_type ==
STORE && (fast_elements || fast_smi_only_elements) &&
2169 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2170 elements, isolate()->factory()->fixed_array_map(), top_info());
2171 check_cow_map->ClearDependsOnFlag(kElementsKind);
2175 length = Add<HLoadNamedField>(
2177 HObjectAccess::ForArrayLength(elements_kind));
2179 length = AddLoadFixedArrayLength(elements);
2187 backing_store = Add<HLoadNamedField>(
2189 HObjectAccess::ForExternalArrayExternalPointer());
2191 backing_store = elements;
2194 NoObservableSideEffectsScope no_effects(
this);
2195 IfBuilder length_checker(
this);
2197 length_checker.Then();
2198 IfBuilder negative_checker(
this);
2200 key, graph()->GetConstant0(), Token::GTE);
2201 negative_checker.Then();
2203 backing_store, key, val, bounds_check, elements_kind, access_type);
2204 negative_checker.ElseDeopt(
"Negative key encountered");
2205 negative_checker.End();
2206 length_checker.End();
2210 checked_key = Add<HBoundsCheck>(key, length);
2211 return AddElementAccess(
2212 backing_store, checked_key, val,
2213 checked_object, elements_kind, access_type);
2216 ASSERT(fast_smi_only_elements ||
2224 !val->
type().IsSmi()) {
2225 val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2228 if (IsGrowStoreMode(store_mode)) {
2229 NoObservableSideEffectsScope no_effects(
this);
2230 elements = BuildCheckForCapacityGrow(checked_object, elements,
2231 elements_kind, length, key,
2232 is_js_array, access_type);
2235 checked_key = Add<HBoundsCheck>(key, length);
2237 if (access_type ==
STORE && (fast_elements || fast_smi_only_elements)) {
2239 NoObservableSideEffectsScope no_effects(
this);
2240 elements = BuildCopyElementsOnWrite(checked_object, elements,
2241 elements_kind, length);
2243 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2244 elements, isolate()->factory()->fixed_array_map(), top_info());
2245 check_cow_map->ClearDependsOnFlag(kElementsKind);
2249 return AddElementAccess(elements, checked_key, val, checked_object,
2250 elements_kind, access_type, load_mode);
2255 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2256 JSArrayBuilder* array_builder,
2257 HValue* length_argument) {
2258 if (length_argument->IsConstant() &&
2259 HConstant::cast(length_argument)->HasSmiValue()) {
2260 int array_length = HConstant::cast(length_argument)->Integer32Value();
2261 HValue* new_object = array_length == 0
2262 ? array_builder->AllocateEmptyArray()
2263 : array_builder->AllocateArray(length_argument, length_argument);
2267 HValue* constant_zero = graph()->GetConstant0();
2268 HConstant* max_alloc_length =
2269 Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2270 HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2272 IfBuilder if_builder(
this);
2276 const int initial_capacity = JSArray::kPreallocatedArrayElements;
2277 HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2278 Push(initial_capacity_node);
2279 Push(constant_zero);
2281 if (!(top_info()->IsStub()) &&
2284 if_builder.Deopt(
"Holey array despite packed elements_kind feedback");
2286 Push(checked_length);
2287 Push(checked_length);
2293 HValue* capacity = Pop();
2294 return array_builder->AllocateArray(capacity, length);
2310 HConstant* elements_size_value = Add<HConstant>(elements_size);
2311 HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
2314 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2315 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2316 total_size->
ClearFlag(HValue::kCanOverflow);
2318 PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
2319 isolate()->heap()->GetPretenureMode() :
NOT_TENURED;
2321 return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
2326 void HGraphBuilder::BuildInitializeElementsHeader(
HValue* elements,
2329 Factory* factory = isolate()->factory();
2331 ? factory->fixed_double_array_map()
2332 : factory->fixed_array_map();
2334 AddStoreMapConstant(elements, map);
2335 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2340 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2345 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2346 HValue* new_elements = BuildAllocateElements(kind, capacity);
2347 BuildInitializeElementsHeader(new_elements, kind, capacity);
2348 return new_elements;
2352 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(
HValue* array,
2356 HValue* allocation_site_payload,
2359 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2361 HConstant* empty_fixed_array =
2362 Add<HConstant>(isolate()->factory()->empty_fixed_array());
2364 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
2365 Add<HStoreNamedField>(array, access, empty_fixed_array);
2366 Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
2370 BuildCreateAllocationMemento(
2371 array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2374 int elements_location = JSArray::kSize;
2376 elements_location += AllocationMemento::kSize;
2379 HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
2380 array, Add<HConstant>(elements_location));
2381 Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
2394 if (access_type ==
STORE) {
2398 val = Add<HClampToUint8>(val);
2400 return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2408 HLoadKeyed* load = Add<HLoadKeyed>(
2409 elements, checked_key, dependency, elements_kind, load_mode);
2410 if (FLAG_opt_safe_uint32_operations &&
2413 graph()->RecordUint32Instruction(load);
2419 HLoadNamedField* HGraphBuilder::AddLoadElements(
HValue*
object) {
2420 return Add<HLoadNamedField>(
2421 object,
static_cast<HValue*
>(
NULL), HObjectAccess::ForElementsPointer());
2425 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
HValue*
object) {
2426 return Add<HLoadNamedField>(
2427 object,
static_cast<HValue*
>(
NULL), HObjectAccess::ForFixedArrayLength());
2432 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2433 graph_->GetConstant1());
2435 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2436 new_capacity->
ClearFlag(HValue::kCanOverflow);
2438 HValue* min_growth = Add<HConstant>(16);
2440 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2441 new_capacity->
ClearFlag(HValue::kCanOverflow);
2443 return new_capacity;
2450 int max_size = Page::kMaxRegularHeapObjectSize / element_size;
2451 max_size -= JSArray::kSize / element_size;
2452 HConstant* max_size_constant = Add<HConstant>(max_size);
2453 Add<HBoundsCheck>(length, max_size_constant);
2463 BuildNewSpaceArrayCheck(new_capacity, new_kind);
2465 HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2466 new_kind, new_capacity);
2468 BuildCopyElements(elements, kind,
2469 new_elements, new_kind,
2470 length, new_capacity);
2472 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2475 return new_elements;
2479 void HGraphBuilder::BuildFillElementsWithHole(
HValue* elements,
2485 Factory* factory = isolate()->factory();
2487 double nan_double = FixedDoubleArray::hole_nan_as_double();
2489 ? Add<HConstant>(factory->the_hole_value())
2490 : Add<HConstant>(nan_double);
2493 static const int kLoopUnfoldLimit = 8;
2494 STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
2495 int initial_capacity = -1;
2500 if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
2501 initial_capacity = constant_to;
2511 if (initial_capacity >= 0) {
2512 for (
int i = 0; i < initial_capacity; i++) {
2514 Add<HStoreKeyed>(elements, key, hole, elements_kind);
2517 LoopBuilder builder(
this, context(), LoopBuilder::kPostIncrement);
2519 HValue* key = builder.BeginBody(from, to, Token::LT);
2521 Add<HStoreKeyed>(elements, key, hole, elements_kind);
2528 void HGraphBuilder::BuildCopyElements(
HValue* from_elements,
2534 bool pre_fill_with_holes =
2538 if (pre_fill_with_holes) {
2542 BuildFillElementsWithHole(to_elements, to_elements_kind,
2543 graph()->GetConstant0(), capacity);
2546 LoopBuilder builder(
this, context(), LoopBuilder::kPostIncrement);
2548 HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
2550 HValue* element = Add<HLoadKeyed>(from_elements, key,
2560 from_elements_kind != to_elements_kind) {
2561 IfBuilder if_hole(
this);
2562 if_hole.If<HCompareHoleAndBranch>(element);
2565 ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2566 : graph()->GetConstantHole();
2567 Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2569 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2570 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2573 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2574 store->SetFlag(HValue::kAllowUndefinedAsNaN);
2579 if (!pre_fill_with_holes && length != capacity) {
2581 BuildFillElementsWithHole(to_elements, to_elements_kind,
2592 NoObservableSideEffectsScope no_effects(
this);
2595 int size = JSArray::kSize;
2597 size += AllocationMemento::kSize;
2600 HValue* size_in_bytes = Add<HConstant>(
size);
2607 for (
int i = 0; i < JSArray::kSize; i +=
kPointerSize) {
2608 if ((i != JSArray::kElementsOffset) || (length == 0)) {
2609 HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
2610 Add<HStoreNamedField>(
2611 object, access, Add<HLoadNamedField>(
2612 boilerplate,
static_cast<HValue*
>(
NULL), access));
2618 BuildCreateAllocationMemento(
2619 object, Add<HConstant>(JSArray::kSize), allocation_site);
2623 HValue* boilerplate_elements = AddLoadElements(boilerplate);
2626 HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
2627 object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2630 HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
2631 object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
2634 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2638 for (
int i = 0; i < FixedArrayBase::kHeaderSize; i +=
kPointerSize) {
2639 HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
2640 Add<HStoreNamedField>(
2641 object_elements, access, Add<HLoadNamedField>(
2642 boilerplate_elements,
static_cast<HValue*
>(
NULL), access));
2649 for (
int i = 0; i < length; i++) {
2650 HValue* key_constant = Add<HConstant>(i);
2651 HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
2653 Add<HStoreKeyed>(object_elements, key_constant, value, kind);
2661 void HGraphBuilder::BuildCompareNil(
2664 HIfContinuation* continuation) {
2665 IfBuilder if_nil(
this);
2666 bool some_case_handled =
false;
2667 bool some_case_missing =
false;
2670 if (some_case_handled) if_nil.Or();
2672 some_case_handled =
true;
2674 some_case_missing =
true;
2678 if (some_case_handled) if_nil.Or();
2680 graph()->GetConstantUndefined());
2681 some_case_handled =
true;
2683 some_case_missing =
true;
2686 if (type->
Maybe(Type::Undetectable())) {
2687 if (some_case_handled) if_nil.Or();
2688 if_nil.If<HIsUndetectableAndBranch>(value);
2689 some_case_handled =
true;
2691 some_case_missing =
true;
2694 if (some_case_missing) {
2698 BuildCheckHeapObject(value);
2703 BuildCheckMap(value, type->
Classes().Current());
2705 if_nil.Deopt(
"Too many undetectable types");
2709 if_nil.CaptureContinuation(continuation);
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size);
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count = Add<HLoadNamedField>(
        allocation_site, static_cast<HValue*>(NULL),
        HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // The smi counter is reset on every GC, so overflow cannot occur here.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    HStoreNamedField* store = Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
    // No write barrier needed to store a smi.
    store->SkipWriteBarrier();
  }
}
HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the function context, then the global object, then the native context.
  HInstruction* context = Add<HLoadNamedField>(
      closure, static_cast<HValue*>(NULL),
      HObjectAccess::ForFunctionContextPointer());
  HInstruction* global_object = Add<HLoadNamedField>(
      context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kNativeContextOffset);
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
}


HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global object, then the native context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForObservableJSObjectOffset(
          GlobalObject::kNativeContextOffset));
}
HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}
2781 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(
HGraphBuilder* builder,
2783 HValue* allocation_site_payload,
2784 HValue* constructor_function,
2788 allocation_site_payload_(allocation_site_payload),
2789 constructor_function_(constructor_function) {
2790 ASSERT(!allocation_site_payload->IsConstant() ||
2791 HConstant::cast(allocation_site_payload)->handle(
2792 builder_->isolate())->IsAllocationSite());
2799 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2801 HValue* constructor_function) :
2805 allocation_site_payload_(
NULL),
2806 constructor_function_(constructor_function) {
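

// EmitMapCode picks the initial map for the array being built: a constant map
// when compiling optimized code, the constructor's initial map when the
// elements kind matches, and otherwise a load from the native context's
// JS array maps, indexed by elements kind.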
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial map,
    // because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  }

  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function.
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->Add<HLoadNamedField>(
      constructor_function_, static_cast<HValue*>(NULL), access);
}
2853 HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
2854 HValue* length_node) {
2865 HInstruction* elements_size_value =
2866 builder()->
Add<HConstant>(elements_size());
2867 HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
2868 length_node, elements_size_value);
2870 HInstruction* base = builder()->
Add<HConstant>(base_size);
2871 HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
2879 HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
2887 : FixedArray::SizeFor(initial_capacity());
2889 return builder()->
Add<HConstant>(base_size);
HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(size_in_bytes,
                       capacity,
                       builder()->graph()->GetConstant0());
}


HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
                                                     HValue* length_field,
                                                     FillMode fill_mode) {
  HValue* size_in_bytes = EstablishAllocationSize(capacity);
  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
}
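
// The main AllocateArray overload forces capacity and length into smi
// representation, allocates the JSArray together with its elements store,
// writes the map, elements and length fields, and optionally pre-fills the
// backing store with the hole.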
2910 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
2912 HValue* length_field,
2913 FillMode fill_mode) {
2918 builder()->
AddUncasted<HForceRepresentation>(capacity,
2921 builder()->
AddUncasted<HForceRepresentation>(length_field,
2924 HAllocate* new_object = builder()->
Add<HAllocate>(size_in_bytes,
2929 new_object->MakeDoubleAligned();
2934 if (allocation_site_payload_ ==
NULL) {
2935 map = EmitInternalMapCode();
2937 map = EmitMapCode();
2943 allocation_site_payload_,
2949 if (fill_mode == FILL_WITH_HOLE) {
2951 graph()->GetConstant0(), capacity);
2960 return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
2961 Add<HConstant>(
map));
2966 HValue* global_object = Add<HLoadNamedField>(
2969 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
2971 HValue* builtins = Add<HLoadNamedField>(
2972 global_object,
static_cast<HValue*
>(
NULL), access);
2973 HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
2975 return Add<HLoadNamedField>(
2976 builtins,
static_cast<HValue*
>(
NULL), function_access);
2982 function_state_(
NULL),
2987 globals_(10, info->zone()),
2988 inline_bailout_(false),
2993 function_state_= &initial_function_state_;
2994 InitializeAstVisitor(info->
zone());
2995 if (FLAG_hydrogen_track_positions) {
HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
                                                HBasicBlock* second,
                                                BailoutId join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph()->CreateBasicBlock();
    Goto(first, join_block);
    Goto(second, join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
                                                  HBasicBlock* exit_block,
                                                  HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) Goto(exit_block, continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
                                                HBasicBlock* loop_entry,
                                                HBasicBlock* body_exit,
                                                HBasicBlock* loop_successor,
                                                HBasicBlock* break_block) {
  if (body_exit != NULL) Goto(body_exit, loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) Goto(loop_successor, break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}
3066 Finish(instruction, position);
3071 HGraph::HGraph(CompilationInfo*
info)
3072 : isolate_(info->isolate()),
3075 blocks_(8, info->zone()),
3076 values_(16, info->zone()),
3078 uint32_instructions_(
NULL),
3081 zone_(info->zone()),
3082 is_recursive_(false),
3083 use_optimistic_licm_(false),
3084 depends_on_empty_array_proto_elements_(false),
3085 type_change_checksum_(0),
3086 maximum_environment_size_(0),
3087 no_side_effects_scope_count_(0),
3088 disallow_adding_new_values_(false),
3090 inlined_functions_(5, info->zone()) {
3091 if (info->IsStub()) {
3092 HydrogenCodeStub* stub = info->code_stub();
3093 CodeStubInterfaceDescriptor* descriptor =
3094 stub->GetInterfaceDescriptor(isolate_);
3095 start_environment_ =
3096 new(zone_) HEnvironment(zone_, descriptor->environment_length());
3099 start_environment_ =
3100 new(zone_) HEnvironment(
NULL, info->scope(), info->closure(), zone_);
3104 entry_block_->SetInitialEnvironment(start_environment_);
HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result, zone());
  return result;
}


void HGraph::FinalizeUniqueness() {
  DisallowHeapAllocation no_gc;
  ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
  for (int i = 0; i < blocks()->length(); ++i) {
    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
      it.Current()->FinalizeUniqueness();
    }
  }
}
3126 int HGraph::TraceInlinedFunction(
3127 Handle<SharedFunctionInfo> shared,
3128 HSourcePosition position) {
3129 if (!FLAG_hydrogen_track_positions) {
3134 for (;
id < inlined_functions_.length();
id++) {
3135 if (inlined_functions_[
id].shared().is_identical_to(shared)) {
3140 if (
id == inlined_functions_.length()) {
3141 inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
3143 if (!shared->script()->IsUndefined()) {
3145 if (!script->source()->IsUndefined()) {
3146 CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3147 PrintF(tracing_scope.file(),
3148 "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
3149 shared->DebugName()->ToCString().get(),
3150 info()->optimization_id(),
3154 ConsStringIteratorOp op;
3155 StringCharacterStream stream(
String::cast(script->source()),
3157 shared->start_position());
3161 shared->end_position() - shared->start_position() + 1;
3162 for (
int i = 0; i < source_len; i++) {
3163 if (stream.HasMore()) {
3164 PrintF(tracing_scope.file(),
"%c", stream.GetNext());
3169 PrintF(tracing_scope.file(),
"\n--- END ---\n");
3174 int inline_id = next_inline_id_++;
3176 if (inline_id != 0) {
3177 CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3178 PrintF(tracing_scope.file(),
"INLINE (%s) id{%d,%d} AS %d AT ",
3179 shared->DebugName()->ToCString().get(),
3180 info()->optimization_id(),
3183 position.PrintTo(tracing_scope.file());
3184 PrintF(tracing_scope.file(),
"\n");
3191 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3192 if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3196 return inlined_functions_[pos.inlining_id()].start_position() +
3253 HLoopInformation*
loop() {
return loop_; }
3260 return result->SetupSuccessors(zone, block,
NULL, visited);
3267 PerformNonBacktrackingStep(zone, visited, order);
3271 return Backtrack(zone, visited, order);
3277 : father_(father), child_(
NULL), successor_iterator(
NULL) { }
3283 SUCCESSORS_OF_LOOP_HEADER,
3285 SUCCESSORS_OF_LOOP_MEMBER
3289 PostorderProcessor* SetupSuccessors(Zone* zone,
3292 BitVector* visited) {
3293 if (block ==
NULL || visited->Contains(block->block_id()) ||
3298 loop_header_ =
NULL;
3303 visited->Add(block->block_id());
3305 if (block->IsLoopHeader()) {
3306 kind_ = SUCCESSORS_OF_LOOP_HEADER;
3307 loop_header_ =
block;
3308 InitializeSuccessors();
3309 PostorderProcessor* result = Push(zone);
3310 return result->SetupLoopMembers(zone, block, block->loop_information(),
3313 ASSERT(block->IsFinished());
3316 InitializeSuccessors();
3322 PostorderProcessor* SetupLoopMembers(Zone* zone,
3324 HLoopInformation*
loop,
3325 HBasicBlock* loop_header) {
3326 kind_ = LOOP_MEMBERS;
3330 InitializeLoopMembers();
3334 PostorderProcessor* SetupSuccessorsOfLoopMember(
3336 HLoopInformation*
loop,
3337 HBasicBlock* loop_header) {
3338 kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3342 InitializeSuccessors();
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    ASSERT(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    ASSERT(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }
3365 PostorderProcessor* Pop(Zone* zone,
3367 ZoneList<HBasicBlock*>* order) {
3370 case SUCCESSORS_OF_LOOP_HEADER:
3371 ClosePostorder(order, zone);
3375 case SUCCESSORS_OF_LOOP_MEMBER:
3376 if (
block()->IsLoopHeader() &&
block() != loop_->loop_header()) {
3379 return SetupLoopMembers(zone,
block(),
3380 block()->loop_information(), loop_header_);
3392 PostorderProcessor* Backtrack(Zone* zone,
3394 ZoneList<HBasicBlock*>* order) {
3395 PostorderProcessor*
parent = Pop(zone, visited, order);
3396 while (parent !=
NULL) {
3397 PostorderProcessor* next =
3398 parent->PerformNonBacktrackingStep(zone, visited, order);
3402 parent = parent->Pop(zone, visited, order);
3408 PostorderProcessor* PerformNonBacktrackingStep(
3411 ZoneList<HBasicBlock*>* order) {
3412 HBasicBlock* next_block;
3415 next_block = AdvanceSuccessors();
3416 if (next_block !=
NULL) {
3417 PostorderProcessor* result = Push(zone);
3418 return result->SetupSuccessors(zone, next_block,
3419 loop_header_, visited);
3422 case SUCCESSORS_OF_LOOP_HEADER:
3423 next_block = AdvanceSuccessors();
3424 if (next_block !=
NULL) {
3425 PostorderProcessor* result = Push(zone);
3426 return result->SetupSuccessors(zone, next_block,
3431 next_block = AdvanceLoopMembers();
3432 if (next_block !=
NULL) {
3433 PostorderProcessor* result = Push(zone);
3434 return result->SetupSuccessorsOfLoopMember(next_block,
3435 loop_, loop_header_);
3438 case SUCCESSORS_OF_LOOP_MEMBER:
3439 next_block = AdvanceSuccessors();
3440 if (next_block !=
NULL) {
3441 PostorderProcessor* result = Push(zone);
3442 return result->SetupSuccessors(zone, next_block,
3443 loop_header_, visited);
3453 void InitializeSuccessors() {
3456 successor_iterator = HSuccessorIterator(block_->end());
3459 HBasicBlock* AdvanceSuccessors() {
3460 if (!successor_iterator.Done()) {
3461 HBasicBlock* result = successor_iterator.Current();
3462 successor_iterator.Advance();
3469 void InitializeLoopMembers() {
3471 loop_length = loop_->blocks()->length();
3474 HBasicBlock* AdvanceLoopMembers() {
3475 if (loop_index < loop_length) {
3476 HBasicBlock* result = loop_->blocks()->at(loop_index);
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  HSuccessorIterator successor_iterator;
};
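

// Block ordering uses an explicit PostorderProcessor work list rather than
// recursion, so deeply nested control flow cannot overflow the native stack.
// Blocks are emitted in postorder and then reversed by OrderBlocks to obtain
// the final reverse-postorder numbering.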
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());
  BitVector visited(blocks_.length(), zone());

  ZoneList<HBasicBlock*> reverse_result(8, zone());
  HBasicBlock* start = blocks_[0];
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
  while (postorder != NULL) {
    postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
  }
  blocks_.Rewind(0);
  int index = 0;
  for (int i = reverse_result.length() - 1; i >= 0; --i) {
    HBasicBlock* b = reverse_result[i];
    blocks_.Add(b, zone());
    b->set_block_id(index++);
  }
}
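

// Dominators are assigned in reverse-postorder: each block's dominator is the
// common dominator of its already-processed predecessors, except that back
// edges into a loop header are ignored.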
void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop;
      // all others are back edges and cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}
bool HGraph::CheckArgumentsPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // We don't support phi uses of arguments for now.
      if (phi->CheckFlag(HValue::kIsArguments)) return false;
    }
  }
  return true;
}


bool HGraph::CheckConstPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // Check for the hole value (from an uninitialized const).
      for (int k = 0; k < phi->OperandCount(); k++) {
        if (phi->OperandAt(k) == GetConstantHole()) return false;
      }
    }
  }
  return true;
}


void HGraph::CollectPhis() {
  int block_count = blocks_.length();
  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list_->Add(phi, zone());
    }
  }
}
3577 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3578 CompilationInfo* info,
3582 compilation_info_(info),
3583 call_context_(
NULL),
3584 inlining_kind_(inlining_kind),
3585 function_return_(
NULL),
3586 test_context_(
NULL),
3588 arguments_object_(
NULL),
3589 arguments_elements_(
NULL),
3590 inlining_id_(inlining_id),
3591 outer_source_position_(HSourcePosition::Unknown()),
3592 outer_(owner->function_state()) {
3593 if (outer_ !=
NULL) {
3595 if (owner->ast_context()->IsTest()) {
3596 HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3597 HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3598 if_true->MarkAsInlineReturnTarget(owner->current_block());
3599 if_false->MarkAsInlineReturnTarget(owner->current_block());
3600 TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3601 Expression* cond = outer_test_context->condition();
3604 test_context_ =
new TestContext(owner, cond, if_true, if_false);
3606 function_return_ = owner->graph()->CreateBasicBlock();
3607 function_return()->MarkAsInlineReturnTarget(owner->current_block());
3610 call_context_ = owner->ast_context();
3614 owner->set_function_state(
this);
3616 if (FLAG_hydrogen_track_positions) {
3617 outer_source_position_ = owner->source_position();
3618 owner->EnterInlinedSource(
3619 info->shared_info()->start_position(),
3621 owner->SetSourcePosition(info->shared_info()->start_position());
3626 FunctionState::~FunctionState() {
3627 delete test_context_;
3628 owner_->set_function_state(outer_);
3630 if (FLAG_hydrogen_track_positions) {
3631 owner_->set_source_position(outer_source_position_);
3632 owner_->EnterInlinedSource(
3633 outer_->compilation_info()->shared_info()->start_position(),
3634 outer_->inlining_id());
3644 outer_(owner->ast_context()),
3645 for_typeof_(false) {
3649 original_length_ = owner->
environment()->length();
EffectContext::~EffectContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


ValueContext::~ValueContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}


void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment and communicated as the
  // result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}


void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}


void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}


void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  owner()->AddInstruction(instr);
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}


void ValueContext::ReturnContinuation(HIfContinuation* continuation,
                                      BailoutId ast_id) {
  HBasicBlock* materialize_true = NULL;
  HBasicBlock* materialize_false = NULL;
  continuation->Continue(&materialize_true, &materialize_false);
  if (continuation->IsTrueReachable()) {
    owner()->set_current_block(materialize_true);
    owner()->Push(owner()->graph()->GetConstantTrue());
    owner()->set_current_block(materialize_true);
  }
  if (continuation->IsFalseReachable()) {
    owner()->set_current_block(materialize_false);
    owner()->Push(owner()->graph()->GetConstantFalse());
    owner()->set_current_block(materialize_false);
  }
  if (continuation->TrueAndFalseReachable()) {
    HBasicBlock* join =
        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
    owner()->set_current_block(join);
  }
}
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}


void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  owner()->set_current_block(NULL);
}


void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  owner()->set_current_block(NULL);
}


void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form, so we conservatively add an
  // empty block on the outgoing edges of this branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  ToBooleanStub::Types expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}
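

// The CHECK_* macros wrap recursive visitor calls: CHECK_BAILOUT returns early
// after a stack overflow bailout, and CHECK_ALIVE additionally returns when
// the current block has been closed (for example by an unconditional deopt or
// return), so no further instructions can be emitted into it.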
#define CHECK_BAILOUT(call) \
  do { call; if (HasStackOverflow()) return; } while (false)

#define CHECK_ALIVE(call) \
  do { call; if (HasStackOverflow() || current_block() == NULL) return; } while (false)

#define CHECK_ALIVE_OR_RETURN(call, value) \
  do { call; if (HasStackOverflow() || current_block() == NULL) return value; } while (false)
3875 EffectContext for_effect(
this);
3882 ValueContext for_value(
this, flag);
3889 for_value.set_for_typeof(
true);
3895 HBasicBlock* true_block,
3896 HBasicBlock* false_block) {
3897 TestContext for_test(
this, expr, true_block, false_block);
3904 for (
int i = 0; i < exprs->length(); ++i) {
3912 Bailout(kFunctionIsAGenerator);
3917 Bailout(kFunctionWithIllegalRedeclaration);
3941 HEnvironment* initial_env =
environment()->CopyWithoutHistory();
3950 VisitVariableDeclaration(scope->
function());
3955 Add<HStackCheck>(HStackCheck::kFunctionEntry);
3958 if (HasStackOverflow())
return false;
3961 Add<HReturn>(
graph()->GetConstantUndefined());
3970 ASSERT(unoptimized_code->kind() == Code::FUNCTION);
3973 int checksum = type_info->own_type_change_checksum();
3974 int composite_checksum =
graph()->update_type_change_checksum(checksum);
3975 graph()->set_use_optimistic_licm(
3976 !type_info->matches_inlined_type_change_checksum(composite_checksum));
3977 type_info->set_inlined_type_change_checksum(composite_checksum);
4001 if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
4002 Run<HEnvironmentLivenessAnalysisPhase>();
4005 if (!CheckConstPhiUses()) {
4006 *bailout_reason = kUnsupportedPhiUseOfConstVariable;
4009 Run<HRedundantPhiEliminationPhase>();
4010 if (!CheckArgumentsPhiUses()) {
4011 *bailout_reason = kUnsupportedPhiUseOfArguments;
4017 Run<HMarkUnreachableBlocksPhase>();
4019 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4020 if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4022 if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4026 if (has_osr()) osr()->FinishOsrValues();
4028 Run<HInferRepresentationPhase>();
4033 Run<HMergeRemovableSimulatesPhase>();
4035 Run<HMarkDeoptimizeOnUndefinedPhase>();
4036 Run<HRepresentationChangesPhase>();
4038 Run<HInferTypesPhase>();
4043 if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
4045 if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4047 if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4049 if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4051 if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4053 Run<HRangeAnalysisPhase>();
4055 Run<HComputeChangeUndefinedToNaN>();
4058 Run<HStackCheckEliminationPhase>();
4060 if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4061 if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4062 if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4063 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4065 RestoreActualValues();
4069 Run<HMarkUnreachableBlocksPhase>();
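

// Informative definitions (for example an HCheckMaps redefining a value) are
// threaded through the graph during optimization. RestoreActualValues rewires
// every remaining use back to the underlying actual value before codegen.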
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      ASSERT(phi->ActualValue() == phi);
    }

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph as a
        // control flow dependency point for subsequent instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        ASSERT(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4111 for (
int i = 0; i < count; ++i) {
4115 while (!arguments.is_empty()) {
4116 Add<HPushArgument>(arguments.RemoveLast());
4121 template <
class Instruction>
4136 HArgumentsObject* arguments_object =
4137 New<HArgumentsObject>(
environment()->parameter_count());
4138 for (
int i = 0; i <
environment()->parameter_count(); ++i) {
4140 arguments_object->AddArgument(parameter,
zone());
4144 graph()->SetArgumentsObject(arguments_object);
4146 HConstant* undefined_constant =
graph()->GetConstantUndefined();
4148 for (
int i =
environment()->parameter_count() + 1;
4158 return Bailout(kContextAllocatedArguments);
4162 graph()->GetArgumentsObject());
4168 for (
int i = 0; i < statements->length(); i++) {
4171 if (stmt->
IsJump())
break;
4176 void HOptimizedGraphBuilder::VisitBlock(
Block* stmt) {
4177 ASSERT(!HasStackOverflow());
4180 if (stmt->scope() !=
NULL) {
4183 BreakAndContinueInfo break_info(stmt);
4184 { BreakAndContinueScope push(&break_info,
this);
4187 HBasicBlock* break_block = break_info.break_block();
4188 if (break_block !=
NULL) {
4190 break_block->SetJoinId(stmt->ExitId());
4196 void HOptimizedGraphBuilder::VisitExpressionStatement(
4197 ExpressionStatement* stmt) {
4198 ASSERT(!HasStackOverflow());
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  ASSERT(!HasStackOverflow());
}


void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
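

// Break and continue targets are resolved by walking the chain of enclosing
// BreakAndContinueScopes until the scope owning the target statement is found,
// accumulating how many values must be dropped from the expression stack
// along the way.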
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  ASSERT(current != NULL);  // Always found (unless stack is malformed).

  if (type == BREAK) {
    *drop_extra += current->info()->drop_extra();
  }

  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}
4290 void HOptimizedGraphBuilder::VisitContinueStatement(
4291 ContinueStatement* stmt) {
4292 ASSERT(!HasStackOverflow());
4297 stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
4299 Goto(continue_block);
4304 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4305 ASSERT(!HasStackOverflow());
4317 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4318 ASSERT(!HasStackOverflow());
4323 if (context ==
NULL) {
4327 Add<HReturn>(result);
4332 if (context->IsTest()) {
4333 TestContext* test = TestContext::cast(context);
4335 Goto(test->if_true(), state);
4336 }
else if (context->IsEffect()) {
4340 ASSERT(context->IsValue());
4342 HValue* return_value =
Pop();
4343 HValue* receiver =
environment()->arguments_environment()->Lookup(0);
4344 HHasInstanceTypeAndBranch* typecheck =
4345 New<HHasInstanceTypeAndBranch>(return_value,
4348 HBasicBlock* if_spec_object =
graph()->CreateBasicBlock();
4349 HBasicBlock* not_spec_object =
graph()->CreateBasicBlock();
4350 typecheck->SetSuccessorAt(0, if_spec_object);
4351 typecheck->SetSuccessorAt(1, not_spec_object);
4360 if (context->IsTest()) {
4361 HValue* rhs =
environment()->arguments_environment()->Lookup(1);
4362 context->ReturnValue(rhs);
4363 }
else if (context->IsEffect()) {
4366 ASSERT(context->IsValue());
4367 HValue* rhs =
environment()->arguments_environment()->Lookup(1);
4373 if (context->IsTest()) {
4374 TestContext* test = TestContext::cast(context);
4375 VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4376 }
else if (context->IsEffect()) {
4385 ASSERT(context->IsValue());
4394 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4395 ASSERT(!HasStackOverflow());
4398 return Bailout(kWithStatement);
4402 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4403 ASSERT(!HasStackOverflow());
4408 const int kCaseClauseLimit = 128;
4409 ZoneList<CaseClause*>* clauses = stmt->cases();
4410 int clause_count = clauses->length();
4411 ZoneList<HBasicBlock*> body_blocks(clause_count,
zone());
4412 if (clause_count > kCaseClauseLimit) {
4413 return Bailout(kSwitchStatementTooManyClauses);
4417 Add<HSimulate>(stmt->EntryId());
4418 HValue* tag_value =
Top();
4419 Type* tag_type = stmt->tag()->bounds().lower;
4423 for (
int i = 0; i < clause_count; ++i) {
4424 CaseClause* clause = clauses->at(i);
4425 if (clause->is_default()) {
4427 if (default_id.IsNone()) default_id = clause->EntryId();
4433 HValue* label_value =
Pop();
4435 Type* label_type = clause->label()->bounds().lower;
4436 Type* combined_type = clause->compare_type();
4437 HControlInstruction* compare = BuildCompareInstruction(
4438 Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4442 PUSH_BEFORE_SIMULATE, clause->id());
4444 HBasicBlock* next_test_block =
graph()->CreateBasicBlock();
4445 HBasicBlock* body_block =
graph()->CreateBasicBlock();
4446 body_blocks.Add(body_block,
zone());
4447 compare->SetSuccessorAt(0, body_block);
4448 compare->SetSuccessorAt(1, next_test_block);
4464 HBasicBlock* fall_through_block =
NULL;
4466 BreakAndContinueInfo break_info(stmt);
4467 { BreakAndContinueScope push(&break_info,
this);
4468 for (
int i = 0; i < clause_count; ++i) {
4469 CaseClause* clause = clauses->at(i);
4473 HBasicBlock* normal_block =
NULL;
4474 if (clause->is_default()) {
4475 if (last_block ==
NULL)
continue;
4476 normal_block = last_block;
4479 normal_block = body_blocks[i];
4482 if (fall_through_block ==
NULL) {
4485 HBasicBlock* join =
CreateJoin(fall_through_block,
4498 HBasicBlock* break_block = break_info.break_block();
4499 if (break_block ==
NULL) {
4504 if (fall_through_block !=
NULL)
Goto(fall_through_block, break_block);
4505 if (last_block !=
NULL)
Goto(last_block, break_block);
4506 break_block->SetJoinId(stmt->ExitId());
4513 HBasicBlock* loop_entry,
4514 BreakAndContinueInfo* break_info) {
4515 BreakAndContinueScope push(break_info,
this);
4517 HStackCheck* stack_check =
4518 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4519 ASSERT(loop_entry->IsLoopHeader());
4520 loop_entry->loop_information()->set_stack_check(stack_check);
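

// The loop statement visitors below share one shape: build a loop header with
// BuildLoopEntry, visit the body inside a BreakAndContinueScope (VisitLoopBody
// above), and hand the body exit, loop successor and break block to
// CreateLoop, which wires the back edge and the exit joins.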
void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  BreakAndContinueInfo break_info(stmt);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
    set_current_block(body_exit);
    loop_successor = graph()->CreateBasicBlock();
    if (stmt->cond()->ToBooleanIsFalse()) {
      Goto(loop_successor);
      body_exit = NULL;
    } else {
      // The block for a true condition is the predecessor of the back edge.
      body_exit = graph()->CreateBasicBlock();
      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    }
    if (body_exit != NULL && body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // If the condition is constant true, do not generate a branch.
  HBasicBlock* loop_successor = NULL;
  if (!stmt->cond()->ToBooleanIsTrue()) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt);
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  ASSERT(!HasStackOverflow());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt);
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4656 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4657 ASSERT(!HasStackOverflow());
4661 if (!FLAG_optimize_for_in) {
4662 return Bailout(kForInStatementOptimizationIsDisabled);
4665 if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
4666 return Bailout(kForInStatementIsNotFastCase);
4669 if (!stmt->each()->IsVariableProxy() ||
4670 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4671 return Bailout(kForInStatementWithNonLocalEachVariable);
4674 Variable* each_var = stmt->each()->AsVariableProxy()->var();
4677 HValue* enumerable =
Top();
4679 HInstruction* map = Add<HForInPrepareMap>(enumerable);
4680 Add<HSimulate>(stmt->PrepareId());
4682 HInstruction* array = Add<HForInCacheArray>(
4685 HInstruction* enum_length = Add<HMapEnumLength>(
map);
4687 HInstruction* start_index = Add<HConstant>(0);
4694 HInstruction* index_cache = Add<HForInCacheArray>(
4696 HForInCacheArray::cast(array)->set_index_cache(
4697 HForInCacheArray::cast(index_cache));
4701 HValue* index =
environment()->ExpressionStackAt(0);
4702 HValue* limit =
environment()->ExpressionStackAt(1);
4705 HCompareNumericAndBranch* compare_index =
4706 New<HCompareNumericAndBranch>(index, limit, Token::LT);
4707 compare_index->set_observed_input_representation(
4710 HBasicBlock* loop_body =
graph()->CreateBasicBlock();
4711 HBasicBlock* loop_successor =
graph()->CreateBasicBlock();
4713 compare_index->SetSuccessorAt(0, loop_body);
4714 compare_index->SetSuccessorAt(1, loop_successor);
4722 HValue* key = Add<HLoadKeyed>(
4730 Add<HCheckMapValue>(
environment()->ExpressionStackAt(4),
4733 Bind(each_var, key);
4735 BreakAndContinueInfo break_info(stmt, 5);
4738 HBasicBlock* body_exit =
4741 if (body_exit !=
NULL) {
4744 HValue* current_index =
Pop();
4745 Push(AddUncasted<HAdd>(current_index,
graph()->GetConstant1()));
4753 break_info.break_block());
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  return Bailout(kForOfStatement);
}

void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  return Bailout(kTryCatchStatement);
}

void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  return Bailout(kTryFinallyStatement);
}

void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  ASSERT(!HasStackOverflow());
  return Bailout(kDebuggerStatement);
}

void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}

void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  Handle<SharedFunctionInfo> shared_info = expr->shared_info();
  if (shared_info.is_null()) {
    shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
  }
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  HFunctionLiteral* instr =
      New<HFunctionLiteral>(shared_info, expr->pretenure());
  return ast_context()->ReturnInstruction(instr, expr->id());
}

void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  return Bailout(kNativeFunctionLiteral);
}
4822 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
4823 ASSERT(!HasStackOverflow());
4826 HBasicBlock* cond_true =
graph()->CreateBasicBlock();
4827 HBasicBlock* cond_false =
graph()->CreateBasicBlock();
4832 if (cond_true->HasPredecessor()) {
4833 cond_true->SetJoinId(expr->ThenId());
4841 if (cond_false->HasPredecessor()) {
4842 cond_false->SetJoinId(expr->ElseId());
4851 HBasicBlock* join =
CreateJoin(cond_true, cond_false, expr->id());
4860 HOptimizedGraphBuilder::GlobalPropertyAccess
4861 HOptimizedGraphBuilder::LookupGlobalProperty(
4866 Handle<GlobalObject> global(
current_info()->global_object());
4867 global->Lookup(*var->name(), lookup);
4868 if (!lookup->IsNormal() ||
4869 (access_type ==
STORE && lookup->IsReadOnly()) ||
4870 lookup->holder() != *global) {
4878 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
4879 ASSERT(var->IsContextSlot());
4882 while (length-- > 0) {
4883 context = Add<HLoadNamedField>(
4891 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
4892 if (expr->is_this()) {
4896 ASSERT(!HasStackOverflow());
4899 Variable* variable = expr->var();
4900 switch (variable->location()) {
4904 return Bailout(kReferenceToGlobalLexicalVariable);
4908 Handle<Object> constant_value =
4910 if (!constant_value.is_null()) {
4911 HConstant* instr = New<HConstant>(constant_value);
4915 LookupResult lookup(
isolate());
4916 GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup,
LOAD);
4918 if (type == kUseCell &&
4919 current_info()->global_object()->IsAccessCheckNeeded()) {
4923 if (type == kUseCell) {
4924 Handle<GlobalObject> global(
current_info()->global_object());
4925 Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
4926 if (cell->type()->IsConstant()) {
4927 cell->AddDependentCompilationInfo(
top_info());
4928 Handle<Object> constant_object = cell->type()->AsConstant();
4929 if (constant_object->IsConsString()) {
4933 HConstant* constant = New<HConstant>(constant_object);
4936 HLoadGlobalCell* instr =
4937 New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
4941 HValue* global_object = Add<HLoadNamedField>(
4944 HLoadGlobalGeneric* instr =
4945 New<HLoadGlobalGeneric>(global_object,
4955 if (value ==
graph()->GetConstantHole()) {
4957 variable->mode() !=
VAR);
4958 return Bailout(kReferenceToUninitializedVariable);
4964 HValue* context = BuildContextChainWalk(variable);
4965 HLoadContextSlot* instr =
new(
zone()) HLoadContextSlot(context, variable);
4970 return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  ASSERT(!HasStackOverflow());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  ASSERT(!HasStackOverflow());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  Handle<FixedArray> literals(closure->literals());
  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
                                              expr->pattern(),
                                              expr->flags(),
                                              expr->literal_index());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


static bool CanInlinePropertyAccess(Type* type) {
  if (type->Is(Type::NumberOrString())) return true;
  if (!type->IsClass()) return false;
  Handle<Map> map = type->AsClass();
  return map->IsJSObjectMap() &&
      !map->is_dictionary_map() &&
      !map->has_named_interceptor();
}
5011 static bool IsFastLiteral(Handle<JSObject> boilerplate,
5013 int* max_properties) {
5014 if (boilerplate->map()->is_deprecated()) {
5016 if (result.is_null())
return false;
5019 ASSERT(max_depth >= 0 && *max_properties >= 0);
5020 if (max_depth == 0)
return false;
5022 Isolate* isolate = boilerplate->GetIsolate();
5023 Handle<FixedArrayBase> elements(boilerplate->elements());
5024 if (elements->length() > 0 &&
5025 elements->map() != isolate->heap()->fixed_cow_array_map()) {
5026 if (boilerplate->HasFastObjectElements()) {
5028 int length = elements->length();
5029 for (
int i = 0; i < length; i++) {
5030 if ((*max_properties)-- == 0)
return false;
5031 Handle<Object> value(fast_elements->get(i), isolate);
5032 if (value->IsJSObject()) {
5034 if (!IsFastLiteral(value_object,
5041 }
else if (!boilerplate->HasFastDoubleElements()) {
5046 Handle<FixedArray> properties(boilerplate->properties());
5047 if (properties->length() > 0) {
5050 Handle<DescriptorArray> descriptors(
5051 boilerplate->map()->instance_descriptors());
5052 int limit = boilerplate->map()->NumberOfOwnDescriptors();
5053 for (
int i = 0; i < limit; i++) {
5054 PropertyDetails details = descriptors->GetDetails(i);
5055 if (details.type() !=
FIELD)
continue;
5056 int index = descriptors->GetFieldIndex(i);
5057 if ((*max_properties)-- == 0)
return false;
5058 Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5059 if (value->IsJSObject()) {
5061 if (!IsFastLiteral(value_object,
5073 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5074 ASSERT(!HasStackOverflow());
5077 expr->BuildConstantProperties(
isolate());
5078 Handle<JSFunction> closure =
function_state()->compilation_info()->closure();
5079 HInstruction* literal;
5083 Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5085 Handle<AllocationSite> site;
5086 Handle<JSObject> boilerplate;
5087 if (!literals_cell->IsUndefined()) {
5090 boilerplate = Handle<JSObject>(
JSObject::cast(site->transition_info()),
5094 if (!boilerplate.is_null() &&
5096 AllocationSiteUsageContext usage_context(
isolate(), site,
false);
5097 usage_context.EnterNewScope();
5098 literal = BuildFastLiteral(boilerplate, &usage_context);
5099 usage_context.ExitScope(site, boilerplate);
5101 NoObservableSideEffectsScope no_effects(
this);
5102 Handle<FixedArray> closure_literals(closure->literals(),
isolate());
5103 Handle<FixedArray> constant_properties = expr->constant_properties();
5104 int literal_index = expr->literal_index();
5105 int flags = expr->fast_elements()
5106 ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5107 flags |= expr->has_function()
5108 ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5110 Add<HPushArgument>(Add<HConstant>(closure_literals));
5111 Add<HPushArgument>(Add<HConstant>(literal_index));
5112 Add<HPushArgument>(Add<HConstant>(constant_properties));
5113 Add<HPushArgument>(Add<HConstant>(
flags));
5119 literal = Add<HCallRuntime>(
isolate()->
factory()->empty_string(),
5128 expr->CalculateEmitStore(
zone());
5130 for (
int i = 0; i < expr->properties()->length(); i++) {
5131 ObjectLiteral::Property*
property = expr->properties()->at(i);
5132 if (property->IsCompileTimeValue())
continue;
5134 Literal* key =
property->key();
5135 Expression* value =
property->value();
5137 switch (property->kind()) {
5138 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5141 case ObjectLiteral::Property::COMPUTED:
5142 if (key->value()->IsInternalizedString()) {
5143 if (property->emit_store()) {
5145 HValue* value =
Pop();
5146 Handle<Map> map =
property->GetReceiverType();
5147 Handle<String>
name =
property->key()->AsPropertyName();
5148 HInstruction* store;
5149 if (map.is_null()) {
5152 STORE, literal, name, value));
5155 if (info.CanAccessMonomorphic()) {
5157 ASSERT(!info.lookup()->IsPropertyCallbacks());
5158 store = BuildMonomorphicAccess(
5159 &info, literal, checked_literal, value,
5163 STORE, literal, name, value));
5167 if (store->HasObservableSideEffects()) {
5176 case ObjectLiteral::Property::PROTOTYPE:
5177 case ObjectLiteral::Property::SETTER:
5178 case ObjectLiteral::Property::GETTER:
5179 return Bailout(kObjectLiteralWithComplexProperty);
5184 if (expr->has_function()) {
5190 HToFastProperties* result = Add<HToFastProperties>(
Pop());
5198 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5199 ASSERT(!HasStackOverflow());
5202 expr->BuildConstantElements(
isolate());
5203 ZoneList<Expression*>* subexprs = expr->values();
5204 int length = subexprs->length();
5205 HInstruction* literal;
5207 Handle<AllocationSite> site;
5209 bool uninitialized =
false;
5210 Handle<Object> literals_cell(literals->get(expr->literal_index()),
5212 Handle<JSObject> boilerplate_object;
5213 if (literals_cell->IsUndefined()) {
5214 uninitialized =
true;
5216 isolate(), literals, expr->constant_elements());
5217 if (raw_boilerplate.is_null()) {
5218 return Bailout(kArrayBoilerplateCreationFailed);
5222 AllocationSiteCreationContext creation_context(
isolate());
5223 site = creation_context.EnterNewScope();
5225 return Bailout(kArrayBoilerplateCreationFailed);
5227 creation_context.ExitScope(site, boilerplate_object);
5228 literals->set(expr->literal_index(), *site);
5230 if (boilerplate_object->elements()->map() ==
5235 ASSERT(literals_cell->IsAllocationSite());
5237 boilerplate_object = Handle<JSObject>(
5241 ASSERT(!boilerplate_object.is_null());
5242 ASSERT(site->SitePointsToLiteral());
5245 boilerplate_object->GetElementsKind();
5249 if (IsFastLiteral(boilerplate_object,
5252 AllocationSiteUsageContext usage_context(
isolate(), site,
false);
5253 usage_context.EnterNewScope();
5254 literal = BuildFastLiteral(boilerplate_object, &usage_context);
5255 usage_context.ExitScope(site, boilerplate_object);
5257 NoObservableSideEffectsScope no_effects(
this);
5260 Handle<FixedArray> constants =
isolate()->
factory()->empty_fixed_array();
5261 int literal_index = expr->literal_index();
5262 int flags = expr->depth() == 1
5263 ? ArrayLiteral::kShallowElements
5264 : ArrayLiteral::kNoFlags;
5265 flags |= ArrayLiteral::kDisableMementos;
5267 Add<HPushArgument>(Add<HConstant>(
literals));
5268 Add<HPushArgument>(Add<HConstant>(literal_index));
5269 Add<HPushArgument>(Add<HConstant>(constants));
5270 Add<HPushArgument>(Add<HConstant>(
flags));
5276 literal = Add<HCallRuntime>(
isolate()->
factory()->empty_string(),
5281 Handle<Map> map = Handle<Map>(boilerplate_object->map(),
isolate());
5282 literal = Add<HCheckMaps>(literal,
map,
top_info());
5289 Push(Add<HConstant>(expr->literal_index()));
5291 HInstruction* elements =
NULL;
5293 for (
int i = 0; i < length; i++) {
5294 Expression* subexpr = subexprs->at(i);
5300 HValue* value =
Pop();
5305 HValue* key = Add<HConstant>(i);
5307 switch (boilerplate_elements_kind) {
5314 HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5315 boilerplate_elements_kind);
5316 instr->SetUninitialized(uninitialized);
5324 Add<HSimulate>(expr->GetIdForElement(i));
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map, top_info());
}


HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  HObjectAccess access = info->access();
  if (access.representation().IsDouble()) {
    // Load the heap number box that holds the double field.
    checked_object = Add<HLoadNamedField>(
        checked_object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    checked_object->set_type(HType::HeapNumber());
    // Then load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return New<HLoadNamedField>(
      checked_object, static_cast<HValue*>(NULL), access);
}
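

// Stores to double fields go through a boxed HeapNumber: a field transition
// allocates a fresh HeapNumber to hold the value, while a store to an existing
// double field loads the box and overwrites its value slot.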
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->lookup()->IsTransition();
  HObjectAccess field_access = HObjectAccess::ForField(
      info->map(), info->lookup(), info->name());

  HStoreNamedField* instr;
  if (field_access.representation().IsDouble()) {
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
          isolate()->heap()->GetPretenureMode() : NOT_TENURED;

      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
          HType::HeapNumber(),
          pretenure_flag,
          HEAP_NUMBER_TYPE);
      AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number = Add<HLoadNamedField>(
          checked_object, static_cast<HValue*>(NULL), heap_number_access);
      heap_number->set_type(HType::HeapNumber());
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    HConstant* transition_constant = Add<HConstant>(info->transition());
    instr->SetTransition(transition_constant, top_info());
    instr->SetChangesFlag(kMaps);
  }
  return instr;
}
5413 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5414 PropertyAccessInfo* info) {
5415 if (!CanInlinePropertyAccess(type_))
return false;
5420 if (type_->Is(Type::Number()))
return false;
5424 if (type_->Is(Type::NumberOrString())) {
5425 if (!info->type_->Is(Type::NumberOrString()))
return false;
5427 if (info->type_->Is(Type::NumberOrString()))
return false;
5430 if (!LookupDescriptor())
return false;
5432 if (!lookup_.IsFound()) {
5433 return (!info->lookup_.IsFound() || info->has_holder()) &&
5434 map()->prototype() == info->map()->prototype();
5439 if (info->has_holder())
return false;
5441 if (lookup_.IsPropertyCallbacks()) {
5442 return accessor_.is_identical_to(info->accessor_) &&
5443 api_holder_.is_identical_to(info->api_holder_);
5446 if (lookup_.IsConstant()) {
5447 return constant_.is_identical_to(info->constant_);
5450 ASSERT(lookup_.IsField());
5451 if (!info->lookup_.IsField())
return false;
5453 Representation r = access_.representation();
5455 if (!info->access_.representation().IsCompatibleForLoad(r))
return false;
5457 if (!info->access_.representation().IsCompatibleForStore(r))
return false;
5459 if (info->access_.offset() != access_.offset())
return false;
5460 if (info->access_.IsInobject() != access_.IsInobject())
return false;
5461 info->GeneralizeRepresentation(r);
5466 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5467 if (!type_->IsClass())
return true;
5468 map()->LookupDescriptor(
NULL, *name_, &lookup_);
5469 return LoadResult(
map());
5473 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5474 if (!IsLoad() && lookup_.IsProperty() &&
5475 (lookup_.IsReadOnly() || !lookup_.IsCacheable())) {
5479 if (lookup_.IsField()) {
5480 access_ = HObjectAccess::ForField(map, &lookup_, name_);
5481 }
else if (lookup_.IsPropertyCallbacks()) {
5482 Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
5483 if (!callback->IsAccessorPair())
return false;
5484 Object* raw_accessor = IsLoad()
5487 if (!raw_accessor->IsJSFunction())
return false;
5489 if (accessor->shared()->IsApiFunction()) {
5490 CallOptimization call_optimization(accessor);
5491 if (!call_optimization.is_simple_api_call())
return false;
5492 CallOptimization::HolderLookup holder_lookup;
5493 api_holder_ = call_optimization.LookupHolderOfExpectedType(
5494 map, &holder_lookup);
5495 switch (holder_lookup) {
5496 case CallOptimization::kHolderNotFound:
5498 case CallOptimization::kHolderIsReceiver:
5499 case CallOptimization::kHolderFound:
5503 accessor_ = accessor;
5504 }
else if (lookup_.IsConstant()) {
5505 constant_ =
handle(lookup_.GetConstantFromMap(*map), isolate());
5512 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
5513 Handle<Map> map = this->
map();
5515 while (map->prototype()->IsJSObject()) {
5517 if (holder_->map()->is_deprecated()) {
5520 map = Handle<Map>(holder_->map());
5521 if (!CanInlinePropertyAccess(ToType(map))) {
5525 map->LookupDescriptor(*holder_, *name_, &lookup_);
5526 if (lookup_.IsFound())
return LoadResult(map);
5533 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
5534 if (!CanInlinePropertyAccess(type_))
return false;
5535 if (IsJSObjectFieldAccessor())
return IsLoad();
5536 if (!LookupDescriptor())
return false;
5537 if (lookup_.IsFound()) {
5538 if (IsLoad())
return true;
5539 return !lookup_.IsReadOnly() && lookup_.IsCacheable();
5541 if (!LookupInPrototypes())
return false;
5542 if (IsLoad())
return true;
5544 if (lookup_.IsPropertyCallbacks())
return true;
5545 Handle<Map> map = this->
map();
5546 map->LookupTransition(
NULL, *name_, &lookup_);
5547 if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
5554 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
5555 SmallMapList* types) {
5556 ASSERT(type_->Is(ToType(types->first())));
5557 if (!CanAccessMonomorphic())
return false;
5558 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
5559 if (types->length() > kMaxLoadPolymorphism)
return false;
5561 HObjectAccess access = HObjectAccess::ForMap();
5562 if (GetJSObjectFieldAccess(&access)) {
5563 for (
int i = 1; i < types->length(); ++i) {
5564 PropertyAccessInfo test_info(
5565 builder_, access_type_, ToType(types->at(i)), name_);
5566 HObjectAccess test_access = HObjectAccess::ForMap();
5567 if (!test_info.GetJSObjectFieldAccess(&test_access))
return false;
5568 if (!access.Equals(test_access))
return false;
5576 if (type_->Is(Type::Number()))
return false;
5579 ASSERT(!IsLoad() || !lookup_.IsTransition());
5580 if (lookup_.IsTransition() && types->length() > 1)
return false;
5582 for (
int i = 1; i < types->length(); ++i) {
5583 PropertyAccessInfo test_info(
5584 builder_, access_type_, ToType(types->at(i)), name_);
5585 if (!test_info.IsCompatible(
this))
return false;
static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
  return type->Is(Type::NumberOrString()) &&
      target->shared()->strict_mode() == SLOPPY &&
      !target->shared()->native();
}
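

// BuildMonomorphicAccess emits a single-map property access: direct field
// loads and stores when the lookup resolved to a field or a map transition,
// inlined or called accessors for property callbacks, and embedded constants
// for constant-function properties.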
5599 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
5600 PropertyAccessInfo* info,
5602 HValue* checked_object,
5605 BailoutId return_id,
5606 bool can_inline_accessor) {
5608 HObjectAccess access = HObjectAccess::ForMap();
5609 if (info->GetJSObjectFieldAccess(&access)) {
5611 return New<HLoadNamedField>(object, checked_object, access);
5614 HValue* checked_holder = checked_object;
5615 if (info->has_holder()) {
5616 Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
5620 if (!info->lookup()->IsFound()) {
5622 return graph()->GetConstantUndefined();
5625 if (info->lookup()->IsField()) {
5626 if (info->IsLoad()) {
5627 return BuildLoadNamedField(info, checked_holder);
5629 return BuildStoreNamedField(info, checked_object, value);
5633 if (info->lookup()->IsTransition()) {
5635 return BuildStoreNamedField(info, checked_object, value);
5638 if (info->lookup()->IsPropertyCallbacks()) {
5639 Push(checked_object);
5640 int argument_count = 1;
5641 if (!info->IsLoad()) {
5646 if (NeedsWrappingFor(info->type(), info->accessor())) {
5647 HValue* function = Add<HConstant>(info->accessor());
5649 return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
5650 } else if (FLAG_inline_accessors && can_inline_accessor) {
5651 bool success = info->IsLoad()
5652 ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
5654 info->accessor(), info->map(), ast_id, return_id, value);
5655 if (success) return NULL;
5659 return BuildCallConstantFunction(info->accessor(), argument_count);
5662 ASSERT(info->lookup()->IsConstant());
5663 if (info->IsLoad()) {
5664 return New<HConstant>(info->constant());
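// Polymorphic named access: emit a chain of map compares (with special
// handling for Smi/HeapNumber and String receivers), build a monomorphic
// access in each branch, and join the results; unhandled maps fall back to a
// generic access or a deoptimization of uncommon cases.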
5671 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
5674 BailoutId return_id,
5677 SmallMapList* types,
5678 Handle<String> name) {
5681 HBasicBlock* join = NULL;
5682 HBasicBlock* number_block = NULL;
5683 bool handled_string = false;
5685 bool handle_smi = false;
5688 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5689 if (info.type()->Is(Type::String())) {
5690 if (handled_string) continue;
5691 handled_string = true;
5693 if (info.CanAccessMonomorphic()) {
5695 if (info.type()->Is(Type::Number())) {
5703 HControlInstruction* smi_check = NULL;
5704 handled_string = false;
5707 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
5708 if (info.type()->Is(Type::String())) {
5709 if (handled_string) continue;
5710 handled_string = true;
5712 if (!info.CanAccessMonomorphic()) continue;
5715 join = graph()->CreateBasicBlock();
5717 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
5718 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
5719 number_block = graph()->CreateBasicBlock();
5720 smi_check = New<HIsSmiAndBranch>(
5721 object, empty_smi_block, not_smi_block);
5730 HBasicBlock* if_true = graph()->CreateBasicBlock();
5731 HBasicBlock* if_false = graph()->CreateBasicBlock();
5732 HUnaryControlInstruction* compare;
5735 if (info.type()->Is(Type::Number())) {
5736 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
5737 compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
5738 dependency = smi_check;
5739 } else if (info.type()->Is(Type::String())) {
5740 compare = New<HIsStringAndBranch>(object, if_true, if_false);
5741 dependency = compare;
5743 compare = New<HCompareMap>(object, info.map(), if_true, if_false);
5744 dependency = compare;
5748 if (info.type()->Is(Type::Number())) {
5750 if_true = number_block;
5755 HInstruction* access = BuildMonomorphicAccess(
5756 &info, object, dependency, value, ast_id,
5757 return_id, FLAG_polymorphic_inlining);
5759 HValue* result = NULL;
5760 switch (access_type) {
5769 if (access == NULL) {
5770 if (HasStackOverflow()) return;
5783 if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
5786 HInstruction* instr = BuildNamedGeneric(access_type, object, name, value);
5800 if (join->HasPredecessor()) {
5801 join->SetJoinId(ast_id);
5810 static bool ComputeReceiverTypes(Expression* expr,
5814 SmallMapList* types = expr->GetReceiverTypes();
5816 bool monomorphic = expr->IsMonomorphic();
5817 if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
5818 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
5819 types->FilterForPossibleTransitions(root_map);
5820 monomorphic = types->length() == 1;
5822 return monomorphic && CanInlinePropertyAccess(
5823 IC::MapToType<Type>(types->first(), zone));
5827 static bool AreStringTypes(SmallMapList* types) {
5828 for (int i = 0; i < types->length(); i++) {
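// Stores go through the keyed-element path for non-name keys and through
// BuildNamedAccess for named properties; the caller adds a simulate when the
// emitted instruction has observable side effects.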
5835 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
5838 BailoutId return_id,
5839 bool is_uninitialized) {
5840 if (!prop->key()->IsPropertyName()) {
5842 HValue* value = environment()->ExpressionStackAt(0);
5843 HValue* key = environment()->ExpressionStackAt(1);
5844 HValue* object = environment()->ExpressionStackAt(2);
5845 bool has_side_effects = false;
5846 HandleKeyedElementAccess(object, key, value, expr,
5847 STORE, &has_side_effects);
5855 HValue* value = Pop();
5856 HValue* object = Pop();
5858 Literal* key = prop->key()->AsLiteral();
5862 HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
5863 object, name, value, is_uninitialized);
5864 if (instr == NULL) return;
5868 if (instr->HasObservableSideEffects()) {
5876 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
5877 Property* prop = expr->target()->AsProperty();
5880 if (!prop->key()->IsPropertyName()) {
5884 BuildStore(expr, prop, expr->id(),
5885 expr->AssignmentId(), expr->IsUninitialized());
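// Global variable stores: when the global is backed by a property cell whose
// type is a known constant, either deoptimize on a mismatching constant store
// or guard the store with an equality check; otherwise store to the cell or
// fall back to a generic named store on the global object.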
5892 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
5896 LookupResult lookup(isolate());
5897 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
5898 if (type == kUseCell) {
5899 Handle<GlobalObject> global(current_info()->global_object());
5900 Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
5901 if (cell->type()->IsConstant()) {
5902 Handle<Object> constant = cell->type()->AsConstant();
5903 if (value->IsConstant()) {
5904 HConstant* c_value = HConstant::cast(value);
5905 if (!constant.is_identical_to(c_value->handle(isolate()))) {
5906 Add<HDeoptimize>("Constant global variable assignment",
5910 HValue* c_constant = Add<HConstant>(constant);
5911 IfBuilder builder(this);
5912 if (constant->IsNumber()) {
5913 builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
5915 builder.If<HCompareObjectEqAndBranch>(value, c_constant);
5919 Add<HDeoptimize>("Constant global variable assignment",
5924 HInstruction* instr =
5925 Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
5926 if (instr->HasObservableSideEffects()) {
5930 HValue* global_object = Add<HLoadNamedField>(
5933 HStoreNamedGeneric* instr =
5934 Add<HStoreNamedGeneric>(global_object, var->name(),
5937 ASSERT(instr->HasObservableSideEffects());
5943 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
5944 Expression* target = expr->target();
5945 VariableProxy* proxy = target->AsVariableProxy();
5946 Property* prop = target->AsProperty();
5951 BinaryOperation* operation = expr->binary_operation();
5953 if (proxy != NULL) {
5954 Variable* var = proxy->var();
5955 if (var->mode() == LET) {
5956 return Bailout(kUnsupportedLetCompoundAssignment);
5961 switch (var->location()) {
5963 HandleGlobalVariableAssignment(var,
5965 expr->AssignmentId());
5971 return Bailout(kUnsupportedConstCompoundAssignment);
5985 for (int i = 0; i < count; ++i) {
5987 Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
5992 HStoreContextSlot::Mode mode;
5994 switch (var->mode()) {
5996 mode = HStoreContextSlot::kCheckDeoptimize;
6005 mode = HStoreContextSlot::kNoCheck;
6008 HValue* context = BuildContextChainWalk(var);
6009 HStoreContextSlot* instr = Add<HStoreContextSlot>(
6011 if (instr->HasObservableSideEffects()) {
6018 return Bailout(kCompoundAssignmentToLookupSlot);
6022 } else if (prop != NULL) {
6024 HValue* object = Top();
6026 if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
6027 prop->IsStringAccess()) {
6035 HValue* right = Pop();
6036 HValue* left = Pop();
6038 Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6040 BuildStore(expr, prop, expr->id(),
6041 expr->AssignmentId(), expr->IsUninitialized());
6043 return Bailout(kInvalidLhsInCompoundAssignment);
6048 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6049 ASSERT(!HasStackOverflow());
6052 VariableProxy* proxy = expr->target()->AsVariableProxy();
6053 Property* prop = expr->target()->AsProperty();
6056 if (expr->is_compound()) {
6057 HandleCompoundAssignment(expr);
6062 HandlePropertyAssignment(expr);
6063 } else if (proxy != NULL) {
6064 Variable* var = proxy->var();
6066 if (var->mode() == CONST) {
6067 if (expr->op() != Token::INIT_CONST) {
6068 return Bailout(kNonInitializerAssignmentToConst);
6071 if (expr->op() != Token::INIT_CONST_LEGACY) {
6076 if (var->IsStackAllocated()) {
6080 Add<HUseConst>(old_value);
6084 if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
6087 switch (var->location()) {
6090 HandleGlobalVariableAssignment(var,
6092 expr->AssignmentId());
6099 if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6101 if (env_value == graph()->GetConstantHole()) {
6102 return Bailout(kAssignmentToLetVariableBeforeInitialization);
6109 HValue* value = Pop();
6122 for (int i = 0; i < count; ++i) {
6124 return Bailout(kAssignmentToParameterInArgumentsObject);
6130 HStoreContextSlot::Mode mode;
6131 if (expr->op() == Token::ASSIGN) {
6132 switch (var->mode()) {
6134 mode = HStoreContextSlot::kCheckDeoptimize;
6143 mode = HStoreContextSlot::kNoCheck;
6145 } else if (expr->op() == Token::INIT_VAR ||
6146 expr->op() == Token::INIT_LET ||
6147 expr->op() == Token::INIT_CONST) {
6148 mode = HStoreContextSlot::kNoCheck;
6150 ASSERT(expr->op() == Token::INIT_CONST_LEGACY);
6152 mode = HStoreContextSlot::kCheckIgnoreAssignment;
6155 HValue* context = BuildContextChainWalk(var);
6156 HStoreContextSlot* instr = Add<HStoreContextSlot>(
6158 if (instr->HasObservableSideEffects()) {
6165 return Bailout(kAssignmentToLOOKUPVariable);
6168 return Bailout(kInvalidLeftHandSideInAssignment);
6173 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6179 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6180 ASSERT(!HasStackOverflow());
6191 Add<HPushArgument>(value);
6194 Add<HSimulate>(expr->id());
6206 if (string->IsConstant()) {
6207 HConstant* c_string = HConstant::cast(string);
6208 if (c_string->HasStringValue()) {
6209 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6212 return Add<HLoadNamedField>(
6213 Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6214 HObjectAccess::ForMap()),
6215 static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
6220 if (string->IsConstant()) {
6221 HConstant* c_string = HConstant::cast(string);
6222 if (c_string->HasStringValue()) {
6223 return Add<HConstant>(c_string->StringValue()->length());
6226 return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6227 HObjectAccess::ForStringLength());
6231 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6236 bool is_uninitialized) {
6237 if (is_uninitialized) {
6238 Add<HDeoptimize>("Insufficient type feedback for generic named access",
6241 if (access_type == LOAD) {
6242 return New<HLoadNamedGeneric>(object, name);
6250 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6255 if (access_type == LOAD) {
6256 return New<HLoadKeyedGeneric>(object, key);
6267 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6269 Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6272 graph()->MarkDependsOnEmptyArrayProtoElements();
6279 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6287 HCheckMaps* checked_object = Add<HCheckMaps>(object, map, top_info(),
6290 checked_object->ClearDependsOnFlag(kElementsKind);
6293 if (access_type == STORE && map->prototype()->IsJSObject()) {
6298 Object* holder = map->prototype();
6299 while (holder->GetPrototype(isolate())->IsJSObject()) {
6300 holder = holder->GetPrototype(isolate());
6302 ASSERT(holder->GetPrototype(isolate())->IsNull());
6310 checked_object, key, val,
6312 map->elements_kind(), access_type,
6313 load_mode, store_mode);
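// For a polymorphic keyed load, try to consolidate all receiver maps into a
// single element access when they agree on the broad elements kind (all
// double vs. all smi/object), widening to the most general holey kind seen.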
6317 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
6321 SmallMapList* maps) {
6326 bool has_double_maps = false;
6327 bool has_smi_or_object_maps = false;
6328 bool has_js_array_access = false;
6329 bool has_non_js_array_access = false;
6330 bool has_seen_holey_elements = false;
6331 Handle<Map> most_general_consolidated_map;
6332 for (int i = 0; i < maps->length(); ++i) {
6333 Handle<Map> map = maps->at(i);
6334 if (!map->IsJSObjectMap()) return NULL;
6337 if (has_non_js_array_access) return NULL;
6338 has_js_array_access = true;
6339 } else if (has_js_array_access) {
6342 has_non_js_array_access = true;
6345 if (map->has_fast_double_elements()) {
6346 if (has_smi_or_object_maps) return NULL;
6347 has_double_maps = true;
6348 } else if (map->has_fast_smi_or_object_elements()) {
6349 if (has_double_maps) return NULL;
6350 has_smi_or_object_maps = true;
6356 has_seen_holey_elements = true;
6361 most_general_consolidated_map->elements_kind(),
6362 map->elements_kind())) {
6363 most_general_consolidated_map = map;
6366 if (!has_double_maps && !has_smi_or_object_maps) return NULL;
6368 HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
6371 ElementsKind consolidated_elements_kind = has_seen_holey_elements
6373 : most_general_consolidated_map->elements_kind();
6375 checked_object, key, val,
6376 most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
6377 consolidated_elements_kind,
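// Polymorphic keyed access: first try a consolidated load; otherwise compute
// element-kind transitions per map, then emit either a single monomorphic
// access (one untransitionable map) or a map-dispatched chain of element
// accesses joined at a common block, going generic for slow maps.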
6383 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
6390 bool* has_side_effects) {
6391 *has_side_effects = false;
6394 if (access_type == LOAD) {
6395 HInstruction* consolidated_load =
6396 TryBuildConsolidatedElementLoad(object, key, val, maps);
6397 if (consolidated_load != NULL) {
6398 *has_side_effects |= consolidated_load->HasObservableSideEffects();
6399 return consolidated_load;
6407 for (int i = 0; i < maps->length(); ++i) {
6408 Handle<Map> map = maps->at(i);
6412 possible_transitioned_maps.Add(map);
6415 HInstruction* result = BuildKeyedGeneric(access_type, object, key, val);
6416 *has_side_effects = result->HasObservableSideEffects();
6421 for (int i = 0; i < maps->length(); ++i) {
6422 Handle<Map> map = maps->at(i);
6423 Handle<Map> transitioned_map =
6424 map->FindTransitionedMap(&possible_transitioned_maps);
6425 transition_target.Add(transitioned_map);
6429 HTransitionElementsKind* transition = NULL;
6430 for (int i = 0; i < maps->length(); ++i) {
6431 Handle<Map> map = maps->at(i);
6433 if (!transition_target.at(i).is_null()) {
6435 map->elements_kind(),
6436 transition_target.at(i)->elements_kind()));
6437 transition = Add<HTransitionElementsKind>(object, map,
6438 transition_target.at(i));
6440 untransitionable_maps.Add(map);
6446 ASSERT(untransitionable_maps.length() >= 1);
6447 if (untransitionable_maps.length() == 1) {
6448 Handle<Map> untransitionable_map = untransitionable_maps[0];
6449 HInstruction* instr = NULL;
6450 if (untransitionable_map->has_slow_elements_kind() ||
6451 !untransitionable_map->IsJSObjectMap()) {
6452 instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6454 instr = BuildMonomorphicElementAccess(
6455 object, key, val, transition, untransitionable_map, access_type,
6458 *has_side_effects |= instr->HasObservableSideEffects();
6459 return access_type == STORE ? NULL : instr;
6462 HBasicBlock* join = graph()->CreateBasicBlock();
6464 for (int i = 0; i < untransitionable_maps.length(); ++i) {
6465 Handle<Map> map = untransitionable_maps[i];
6466 if (!map->IsJSObjectMap()) continue;
6468 HBasicBlock* this_map = graph()->CreateBasicBlock();
6469 HBasicBlock* other_map = graph()->CreateBasicBlock();
6470 HCompareMap* mapcompare =
6471 New<HCompareMap>(object, map, this_map, other_map);
6475 HInstruction* access = NULL;
6477 access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val));
6485 mapcompare, key, val,
6487 elements_kind, access_type,
6494 if (access_type == LOAD) {
6497 NoObservableSideEffectsScope scope(this);
6506 ASSERT(join->predecessors()->length() > 0);
6508 NoObservableSideEffectsScope scope(this);
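// Top-level dispatcher for keyed element accesses: choose monomorphic,
// polymorphic, or generic handling based on receiver type feedback, and
// deoptimize when a keyed load or store has no type information at all.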
6515 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
6521 bool* has_side_effects) {
6522 ASSERT(!expr->IsPropertyName());
6523 HInstruction* instr = NULL;
6525 SmallMapList* types;
6526 bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
6528 bool force_generic = false;
6529 if (access_type == STORE &&
6530 (monomorphic || (types != NULL && !types->is_empty()))) {
6535 for (int i = 0; i < types->length(); i++) {
6536 Handle<Map> current_map = types->at(i);
6537 if (current_map->DictionaryElementsInPrototypeChainOnly()) {
6538 force_generic = true;
6539 monomorphic = false;
6546 Handle<Map> map = types->first();
6547 if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
6548 instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6551 instr = BuildMonomorphicElementAccess(
6552 obj, key, val, NULL, map, access_type, expr->GetStoreMode());
6554 } else if (!force_generic && (types != NULL && !types->is_empty())) {
6555 return HandlePolymorphicElementAccess(
6556 obj, key, val, types, access_type,
6557 expr->GetStoreMode(), has_side_effects);
6559 if (access_type == STORE) {
6560 if (expr->IsAssignment() &&
6561 expr->AsAssignment()->HasNoTypeInformation()) {
6562 Add<HDeoptimize>("Insufficient type feedback for keyed store",
6566 if (expr->AsProperty()->HasNoTypeInformation()) {
6567 Add<HDeoptimize>("Insufficient type feedback for keyed load",
6571 instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val));
6573 *has_side_effects = instr->HasObservableSideEffects();
6578 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
6586 entry->set_arguments_pushed();
6588 HArgumentsObject* arguments = entry->arguments_object();
6589 const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
6591 HInstruction* insert_after = entry;
6592 for (int i = 0; i < arguments_values->length(); i++) {
6593 HValue* argument = arguments_values->at(i);
6594 HInstruction* push_argument = New<HPushArgument>(argument);
6595 push_argument->InsertAfter(insert_after);
6596 insert_after = push_argument;
6599 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
6601 arguments_elements->InsertAfter(insert_after);
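// Recognizes arguments.length and arguments[i] on the stack-allocated
// arguments object and lowers them to direct accesses on the materialized
// argument values, both in the outer function and inside inlined frames.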
6606 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
6607 VariableProxy* proxy = expr->obj()->AsVariableProxy();
6608 if (proxy == NULL) return false;
6609 if (!proxy->var()->IsStackAllocated()) return false;
6614 HInstruction* result = NULL;
6615 if (expr->key()->IsPropertyName()) {
6616 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6620 HInstruction* elements = Add<HArgumentsElements>(false);
6621 result = New<HArgumentsLength>(elements);
6625 arguments_environment()->parameter_count() - 1;
6626 result = New<HConstant>(argument_count);
6631 HValue* key = Pop();
6634 HInstruction* elements = Add<HArgumentsElements>(false);
6635 HInstruction* length = Add<HArgumentsLength>(elements);
6636 HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6637 result = New<HAccessArgumentsAt>(elements, length, checked_key);
6639 EnsureArgumentsArePushedForAccess();
6644 arguments_environment()->parameter_count() - 1;
6645 HInstruction* length = Add<HConstant>(argument_count);
6646 HInstruction* checked_key = Add<HBoundsCheck>(key, length);
6647 result = New<HAccessArgumentsAt>(elements, length, checked_key);
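// Common entry point for named loads and stores: collect receiver map
// feedback, handle the monomorphic case with a map or string check, dispatch
// to the polymorphic handler, or fall back to a generic named access.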
6655 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
6658 BailoutId return_id,
6661 Handle<String> name,
6663 bool is_uninitialized) {
6664 SmallMapList* types;
6665 ComputeReceiverTypes(expr, object, &types, zone());
6668 if (types->length() > 0) {
6669 PropertyAccessInfo info(this, access, ToType(types->first()), name);
6670 if (!info.CanAccessAsMonomorphic(types)) {
6671 HandlePolymorphicNamedFieldAccess(
6672 access, ast_id, return_id, object, value, types, name);
6676 HValue* checked_object;
6678 ASSERT(!info.type()->Is(Type::Number()));
6680 if (AreStringTypes(types)) {
6682 Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
6684 checked_object = Add<HCheckMaps>(object, types);
6686 return BuildMonomorphicAccess(
6687 &info, object, checked_object, value, ast_id, return_id);
6690 return BuildNamedGeneric(access, object, name, value, is_uninitialized);
6694 void HOptimizedGraphBuilder::PushLoad(Property* expr,
6700 BuildLoad(expr, expr->LoadId());
6704 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
6706 HInstruction* instr = NULL;
6707 if (expr->IsStringAccess()) {
6708 HValue* index = Pop();
6709 HValue* string = Pop();
6710 HInstruction* char_code = BuildStringCharCodeAt(string, index);
6712 instr = NewUncasted<HStringCharFromCode>(char_code);
6714 } else if (expr->IsFunctionPrototype()) {
6715 HValue* function = Pop();
6717 instr = New<HLoadFunctionPrototype>(function);
6719 } else if (expr->key()->IsPropertyName()) {
6720 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
6721 HValue* object = Pop();
6723 instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
6724 object, name, NULL, expr->IsUninitialized());
6725 if (instr == NULL) return;
6729 HValue* key = Pop();
6730 HValue* obj = Pop();
6732 bool has_side_effects = false;
6733 HValue* load = HandleKeyedElementAccess(
6734 obj, key, NULL, expr, LOAD, &has_side_effects);
6735 if (has_side_effects) {
6750 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
6751 ASSERT(!HasStackOverflow());
6755 if (TryArgumentsAccess(expr)) return;
6758 if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
6759 expr->IsStringAccess()) {
6763 BuildLoad(expr, expr->id());
6769 HConstant* constant_value = New<HConstant>(constant);
6771 if (constant->map()->CanOmitMapChecks()) {
6772 constant->map()->AddDependentCompilationInfo(
6774 return constant_value;
6779 Add<HCheckMaps>(constant_value, handle(constant->map()), info);
6780 check->ClearDependsOnFlag(kElementsKind);
6787 while (!prototype.is_identical_to(holder)) {
6794 return checked_object;
6798 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
6807 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
6808 HValue* fun, int argument_count, bool pass_argument_count) {
6809 return New<HCallJSFunction>(
6810 fun, argument_count, pass_argument_count);
6814 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
6815 HValue* fun, HValue* context,
6816 int argument_count, HValue* expected_param_count) {
6817 CallInterfaceDescriptor* descriptor =
6820 HValue* arity = Add<HConstant>(argument_count - 1);
6822 HValue* op_vals[] = { fun, context, arity, expected_param_count };
6824 Handle<Code> adaptor =
6826 HConstant* adaptor_value = Add<HConstant>(adaptor);
6828 return New<HCallWithDescriptor>(
6829 adaptor_value, argument_count, descriptor,
6830 Vector<HValue*>(op_vals, descriptor->environment_length()));
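// Calls to a known JSFunction: invoke it directly when the arity matches (or
// the callee does not adapt arguments), otherwise go through the arguments
// adaptor trampoline with the expected parameter count.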
6834 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
6835 Handle<JSFunction> jsfun, int argument_count) {
6836 HValue* target = Add<HConstant>(jsfun);
6840 bool dont_adapt_arguments =
6841 (formal_parameter_count ==
6843 int arity = argument_count - 1;
6844 bool can_invoke_directly =
6845 dont_adapt_arguments || formal_parameter_count == arity;
6846 if (can_invoke_directly) {
6847 if (jsfun.is_identical_to(current_info()->closure())) {
6848 graph()->MarkRecursive();
6850 return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
6853 HValue* context = Add<HLoadNamedField>(
6854 target, static_cast<HValue*>(NULL),
6855 HObjectAccess::ForFunctionContextPointer());
6856 return NewArgumentAdaptorCall(target, context,
6857 argument_count, param_count_value);
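// Polymorphic calls to a named property: order the receiver maps whose
// feedback resolves to a constant JSFunction, dispatch on map (with Smi/
// HeapNumber and String special cases), try inlining each target, and emit a
// generic call or a deoptimization for the remaining maps.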
6864 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
6867 SmallMapList* types,
6868 Handle<String> name) {
6869 int argument_count = expr->arguments()->length() + 1;
6872 bool handle_smi = false;
6873 bool handled_string = false;
6874 int ordered_functions = 0;
6880 if (info.CanAccessMonomorphic() &&
6881 info.lookup()->IsConstant() &&
6882 info.constant()->IsJSFunction()) {
6883 if (info.type()->Is(Type::String())) {
6884 if (handled_string) continue;
6885 handled_string = true;
6888 if (info.type()->Is(Type::Number())) {
6891 expr->set_target(target);
6892 order[ordered_functions++] = i;
6896 HBasicBlock* number_block = NULL;
6897 HBasicBlock* join = NULL;
6898 handled_string = false;
6901 for (int fn = 0; fn < ordered_functions; ++fn) {
6904 if (info.type()->Is(Type::String())) {
6905 if (handled_string) continue;
6906 handled_string = true;
6909 info.CanAccessMonomorphic();
6912 expr->set_target(target);
6915 join = graph()->CreateBasicBlock();
6917 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6918 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6919 number_block = graph()->CreateBasicBlock();
6921 receiver, empty_smi_block, not_smi_block));
6929 HBasicBlock* if_true = graph()->CreateBasicBlock();
6930 HBasicBlock* if_false = graph()->CreateBasicBlock();
6931 HUnaryControlInstruction* compare;
6933 Handle<Map> map = info.map();
6934 if (info.type()->Is(Type::Number())) {
6935 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6936 compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
6937 } else if (info.type()->Is(Type::String())) {
6938 compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
6940 compare = New<HCompareMap>(receiver, map, if_true, if_false);
6944 if (info.type()->Is(Type::Number())) {
6946 if_true = number_block;
6951 AddCheckPrototypeMaps(info.holder(), map);
6953 HValue* function = Add<HConstant>(expr->target());
6957 bool needs_wrapping = NeedsWrappingFor(info.type(), target);
6958 bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
6959 if (FLAG_trace_inlining && try_inline) {
6961 SmartArrayPointer<char> caller_name =
6962 caller->shared()->DebugName()->ToCString();
6963 PrintF("Trying to inline the polymorphic call to %s from %s\n",
6964 name->ToCString().get(),
6967 if (try_inline && TryInlineCall(expr)) {
6970 if (HasStackOverflow()) return;
6976 HInstruction* call = needs_wrapping
6977 ? NewUncasted<HCallFunction>(
6979 : BuildCallConstantFunction(target, argument_count);
6993 if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
6996 Property* prop = expr->expression()->AsProperty();
6997 HInstruction* function = BuildNamedGeneric(
6998 LOAD, receiver, name, NULL, prop->IsUninitialized());
7009 HInstruction* call = New<HCallFunction>(
7010 function, argument_count, flags);
7029 if (join->HasPredecessor()) {
7031 join->SetJoinId(expr->id());
7039 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7040 Handle<JSFunction> caller,
7041 const char* reason) {
7042 if (FLAG_trace_inlining) {
7043 SmartArrayPointer<char> target_name =
7044 target->shared()->DebugName()->ToCString();
7045 SmartArrayPointer<char> caller_name =
7046 caller->shared()->DebugName()->ToCString();
7047 if (reason == NULL) {
7048 PrintF("Inlined %s called from %s.\n", target_name.get(),
7051 PrintF("Did not inline %s called from %s (%s).\n",
7052 target_name.get(), caller_name.get(), reason);
7058 static const int kNotInlinable = 1000000000;
7061 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7062 if (!FLAG_use_inlining) return kNotInlinable;
7067 Handle<SharedFunctionInfo> target_shared(target->shared());
7070 if (target->IsBuiltin()) {
7071 return target_shared->inline_builtin() ? 0 : kNotInlinable;
7076 if (target_shared->SourceSize() >
7078 TraceInline(target, caller, "target text too big");
7079 return kNotInlinable;
7083 if (!target_shared->IsInlineable()) {
7084 TraceInline(target, caller, "target not inlineable");
7085 return kNotInlinable;
7087 if (target_shared->dont_inline() || target_shared->dont_optimize()) {
7088 TraceInline(target, caller, "target contains unsupported syntax [early]");
7089 return kNotInlinable;
7092 int nodes_added = target_shared->ast_node_count();
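// Core inlining routine: checks AST-size, depth, recursion, and cumulative
// node limits, parses and analyzes the target, builds its body into the
// current graph inside an inlined frame, and wires up normal and test-context
// return targets; on failure it bails out and disables optimization.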
7097 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7098 int arguments_count,
7099 HValue* implicit_return_value,
7101 BailoutId return_id,
7103 HSourcePosition position) {
7104 int nodes_added = InliningAstSize(target);
7105 if (nodes_added == kNotInlinable) return false;
7110 TraceInline(target, caller, "target AST is too large [early]");
7116 int current_level = 1;
7117 while (env->outer() != NULL) {
7118 if (current_level == FLAG_max_inlining_levels) {
7119 TraceInline(target, caller, "inline depth limit reached");
7131 state = state->outer()) {
7132 if (*state->compilation_info()->closure() == *target) {
7133 TraceInline(target, caller, "target is recursive");
7139 if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7141 TraceInline(target, caller, "cumulative AST node limit reached");
7146 CompilationInfo target_info(target, zone());
7147 Handle<SharedFunctionInfo> target_shared(target->shared());
7149 if (target_info.isolate()->has_pending_exception()) {
7152 target_shared->DisableOptimization(kParseScopeError);
7154 TraceInline(target, caller, "parse failure");
7158 if (target_info.scope()->num_heap_slots() > 0) {
7159 TraceInline(target, caller, "target has context-allocated variables");
7162 FunctionLiteral* function = target_info.function();
7166 nodes_added = function->ast_node_count();
7168 TraceInline(target, caller, "target AST is too large [late]");
7171 AstProperties::Flags* flags(function->flags());
7172 if (flags->Contains(kDontInline) || function->dont_optimize()) {
7173 TraceInline(target, caller, "target contains unsupported syntax [late]");
7180 if (function->scope()->arguments() != NULL) {
7181 if (!FLAG_inline_arguments) {
7182 TraceInline(target, caller, "target uses arguments object");
7186 if (!function->scope()->arguments()->IsStackAllocated()) {
7189 "target uses non-stackallocated arguments object");
7195 ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7196 int decl_count = decls->length();
7197 for (int i = 0; i < decl_count; ++i) {
7198 if (!decls->at(i)->IsInlineable()) {
7199 TraceInline(target, caller, "target has non-trivial declaration");
7206 if (!target_shared->has_deoptimization_support()) {
7209 target_info.EnableDeoptimizationSupport();
7211 TraceInline(target, caller, "could not generate deoptimization info");
7217 Handle<ScopeInfo> target_scope_info =
7219 target_shared->set_scope_info(*target_scope_info);
7221 target_shared->EnableDeoptimizationSupport(*target_info.code());
7232 ASSERT(target_shared->has_deoptimization_support());
7235 int function_id = graph()->TraceInlinedFunction(target_shared, position);
7241 this, &target_info, inlining_kind, function_id);
7243 HConstant* undefined = graph()->GetConstantUndefined();
7245 HEnvironment* inner_env =
7252 HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7253 inner_env->BindContext(context);
7255 Add<HSimulate>(return_id);
7257 HArgumentsObject* arguments_object = NULL;
7261 if (function->scope()->arguments() != NULL) {
7262 ASSERT(function->scope()->arguments()->IsStackAllocated());
7263 HEnvironment* arguments_env = inner_env->arguments_environment();
7264 int arguments_count = arguments_env->parameter_count();
7265 arguments_object = Add<HArgumentsObject>(arguments_count);
7266 inner_env->Bind(function->scope()->arguments(), arguments_object);
7267 for (int i = 0; i < arguments_count; i++) {
7268 arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7272 HEnterInlined* enter_inlined =
7273 Add<HEnterInlined>(target, arguments_count, function,
7275 function->scope()->arguments(),
7281 if (HasStackOverflow()) {
7284 TraceInline(target, caller, "inline graph construction failed");
7285 target_shared->DisableOptimization(kInliningBailedOut);
7286 inline_bailout_ = true;
7287 delete target_state;
7292 inlined_count_ += nodes_added;
7294 Handle<Code> unoptimized_code(target_shared->code());
7295 ASSERT(unoptimized_code->kind() == Code::FUNCTION);
7296 Handle<TypeFeedbackInfo> type_info(
7298 graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7300 TraceInline(target, caller, NULL);
7352 delete target_state;
7355 if (if_true->HasPredecessor()) {
7356 entry->RegisterReturnTarget(if_true, zone());
7357 if_true->SetJoinId(ast_id);
7358 HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
7361 if (if_false->HasPredecessor()) {
7362 entry->RegisterReturnTarget(if_false, zone());
7363 if_false->SetJoinId(ast_id);
7364 HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
7377 delete target_state;
7382 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
7383 return TryInline(expr->target(),
7384 expr->arguments()->length(),
7393 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
7394 HValue* implicit_return_value) {
7395 return TryInline(expr->target(),
7396 expr->arguments()->length(),
7397 implicit_return_value,
7405 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7406 Handle<Map> receiver_map,
7408 BailoutId return_id) {
7409 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
7410 return TryInline(getter,
7420 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7421 Handle<Map> receiver_map,
7423 BailoutId assignment_id,
7424 HValue* implicit_return_value) {
7425 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
7426 return TryInline(setter,
7428 implicit_return_value,
7435 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
7437 int arguments_count) {
7438 return TryInline(function,
7448 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
7449 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7453 if (!FLAG_fast_math) break;
7461 if (expr->arguments()->length() == 1) {
7462 HValue* argument = Pop();
7464 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7470 if (expr->arguments()->length() == 2) {
7471 HValue* right = Pop();
7472 HValue* left = Pop();
7474 HInstruction* op = HMul::NewImul(zone(), context(), left, right);
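// Recognizes calls to well-known builtin methods on a receiver (String
// charCodeAt/fromCharCode, Math operations, Array pop/push) and replaces them
// with dedicated Hydrogen instructions when the argument count and receiver
// map allow it.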
7487 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
7490 Handle<Map> receiver_map) {
7492 if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
7494 int argument_count = expr->arguments()->length() + 1;
7496 case kStringCharCodeAt:
7498 if (argument_count == 2) {
7499 HValue* index = Pop();
7500 HValue* string = Pop();
7502 HInstruction* char_code =
7503 BuildStringCharCodeAt(string, index);
7504 if (id == kStringCharCodeAt) {
7509 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
7514 case kStringFromCharCode:
7515 if (argument_count == 2) {
7516 HValue* argument = Pop();
7518 HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
7524 if (!FLAG_fast_math) break;
7532 if (argument_count == 2) {
7533 HValue* argument = Pop();
7535 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
7541 if (argument_count == 3) {
7542 HValue* right = Pop();
7543 HValue* left = Pop();
7545 HInstruction* result = NULL;
7547 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
7548 double exponent = HConstant::cast(right)->DoubleValue();
7549 if (exponent == 0.5) {
7550 result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
7551 } else if (exponent == -0.5) {
7552 HValue* one = graph()->GetConstant1();
7553 HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
7557 ASSERT(!sqrt->HasObservableSideEffects());
7558 result = NewUncasted<HDiv>(one, sqrt);
7559 } else if (exponent == 2.0) {
7560 result = NewUncasted<HMul>(left, left);
7564 if (result == NULL) {
7565 result = NewUncasted<HPower>(left, right);
7573 if (argument_count == 3) {
7574 HValue* right = Pop();
7575 HValue* left = Pop();
7577 HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
7578 : HMathMinMax::kMathMax;
7579 HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
7585 if (argument_count == 3) {
7586 HValue* right = Pop();
7587 HValue* left = Pop();
7589 HInstruction* result = HMul::NewImul(zone(), context(), left, right);
7595 if (receiver_map.is_null()) return false;
7596 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7597 ElementsKind elements_kind = receiver_map->elements_kind();
7600 Drop(expr->arguments()->length());
7602 HValue* reduced_length;
7603 HValue* receiver = Pop();
7605 HValue* checked_object = AddCheckMap(receiver, receiver_map);
7606 HValue* length = Add<HLoadNamedField>(
7607 checked_object, static_cast<HValue*>(NULL),
7608 HObjectAccess::ForArrayLength(elements_kind));
7612 { NoObservableSideEffectsScope scope(this);
7613 IfBuilder length_checker(this);
7615 HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
7617 length_checker.Then();
7621 length_checker.Else();
7626 elements_kind, length);
7628 reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
7630 bounds_check, elements_kind, LOAD);
7634 ? Add<HConstant>(factory->the_hole_value())
7635 : Add<HConstant>(nan_double);
7640 elements, reduced_length, hole, bounds_check, elements_kind, STORE);
7641 Add<HStoreNamedField>(
7642 checked_object, HObjectAccess::ForArrayLength(elements_kind),
7647 length_checker.End();
7657 if (receiver_map.is_null()) return false;
7658 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
7659 ElementsKind elements_kind = receiver_map->elements_kind();
7662 HValue* op_vals[] = {
7665 environment()->ExpressionStackAt(expr->arguments()->length())
7668 const int argc = expr->arguments()->length();
7672 CallInterfaceDescriptor* descriptor =
7675 ArrayPushStub stub(receiver_map->elements_kind(), argc);
7677 HConstant* code_value = Add<HConstant>(code);
7680 descriptor->environment_length());
7682 HInstruction* call = New<HCallWithDescriptor>(
7683 code_value, argc + 1, descriptor,
7684 Vector<HValue*>(op_vals, descriptor->environment_length()));
7697 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
7699 Handle<JSFunction> function = expr->target();
7700 int argc = expr->arguments()->length();
7701 SmallMapList receiver_maps;
7702 return TryInlineApiCall(function,
7711 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
7714 SmallMapList* receiver_maps) {
7715 Handle<JSFunction> function = expr->target();
7716 int argc = expr->arguments()->length();
7717 return TryInlineApiCall(function,
7726 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
7727 Handle<Map> receiver_map,
7729 SmallMapList receiver_maps(1, zone());
7730 receiver_maps.Add(receiver_map, zone());
7731 return TryInlineApiCall(function,
7740 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
7741 Handle<Map> receiver_map,
7743 SmallMapList receiver_maps(1, zone());
7744 receiver_maps.Add(receiver_map, zone());
7745 return TryInlineApiCall(function,
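// Inlines a simple API call (function, method, getter, or setter) by
// checking the receiver maps, locating the expected API holder, and emitting
// a CallApiFunctionStub invocation with the callback's call data and address.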
7754 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
7756 SmallMapList* receiver_maps,
7759 ApiCallType call_type) {
7760 CallOptimization optimization(function);
7761 if (!optimization.is_simple_api_call()) return false;
7762 Handle<Map> holder_map;
7763 if (call_type == kCallApiFunction) {
7768 receiver_maps->Add(handle(
7769 function->context()->global_object()->global_receiver()->map()),
7772 CallOptimization::HolderLookup holder_lookup =
7773 CallOptimization::kHolderNotFound;
7774 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
7775 receiver_maps->first(), &holder_lookup);
7776 if (holder_lookup == CallOptimization::kHolderNotFound) return false;
7778 if (FLAG_trace_inlining) {
7779 PrintF("Inlining api function ");
7780 function->ShortPrint();
7784 bool drop_extra = false;
7785 bool is_store = false;
7786 switch (call_type) {
7787 case kCallApiFunction:
7788 case kCallApiMethod:
7790 Add<HCheckMaps>(receiver, receiver_maps);
7792 if (holder_lookup == CallOptimization::kHolderFound) {
7793 AddCheckPrototypeMaps(api_holder, receiver_maps->first());
7795 ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
7802 case kCallApiGetter:
7808 Add<HPushArgument>(receiver);
7810 case kCallApiSetter:
7817 HValue* value = Pop();
7819 Add<HPushArgument>(receiver);
7820 Add<HPushArgument>(value);
7825 HValue* holder = NULL;
7826 switch (holder_lookup) {
7827 case CallOptimization::kHolderFound:
7828 holder = Add<HConstant>(api_holder);
7830 case CallOptimization::kHolderIsReceiver:
7833 case CallOptimization::kHolderNotFound:
7837 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
7838 Handle<Object> call_data_obj(api_call_info->data(), isolate());
7839 bool call_data_is_undefined = call_data_obj->IsUndefined();
7840 HValue* call_data = Add<HConstant>(call_data_obj);
7841 ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
7842 ExternalReference ref = ExternalReference(&fun,
7843 ExternalReference::DIRECT_API_CALL,
7845 HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
7847 HValue* op_vals[] = {
7848 Add<HConstant>(function),
7851 api_function_address,
7855 CallInterfaceDescriptor* descriptor =
7858 CallApiFunctionStub stub(is_store, call_data_is_undefined, argc);
7859 Handle<Code> code = stub.GetCode(isolate());
7860 HConstant* code_value = Add<HConstant>(code);
7863 descriptor->environment_length());
7865 HInstruction* call = New<HCallWithDescriptor>(
7866 code_value, argc + 1, descriptor,
7867 Vector<HValue*>(op_vals, descriptor->environment_length()));
7869 if (drop_extra) Drop(1);
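// Lowers f.apply(receiver, arguments) when the second argument is the
// stack-allocated arguments object: outside inlined frames it uses
// HApplyArguments, inside them it re-pushes the materialized argument values
// and either inlines the known target or emits an HInvokeFunction.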
7875 bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
7876 ASSERT(expr->expression()->IsProperty());
7878 if (!expr->IsMonomorphic()) {
7881 Handle<Map> function_map = expr->GetReceiverTypes()->first();
7883 !expr->target()->shared()->HasBuiltinFunctionId() ||
7884 expr->target()->shared()->builtin_function_id() != kFunctionApply) {
7890 ZoneList<Expression*>* args = expr->arguments();
7891 if (args->length() != 2) return false;
7893 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
7894 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
7900 HValue* receiver = Pop();
7901 HValue* function = Pop();
7905 HInstruction* elements = Add<HArgumentsElements>(false);
7906 HInstruction* length = Add<HArgumentsLength>(elements);
7908 HInstruction* result = New<HApplyArguments>(function,
7918 function_state()->entry()->arguments_object()->arguments_count());
7919 HArgumentsObject* args = function_state()->entry()->arguments_object();
7920 const ZoneList<HValue*>* arguments_values = args->arguments_values();
7921 int arguments_count = arguments_values->length();
7924 for (int i = 1; i < arguments_count; i++) {
7925 Push(arguments_values->at(i));
7928 Handle<JSFunction> known_function;
7929 if (function->IsConstant() &&
7930 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
7933 int args_count = arguments_count - 1;
7934 if (TryInlineApply(known_function, expr, args_count)) return true;
7938 HInvokeFunction* call = New<HInvokeFunction>(
7939 function, known_function, arguments_count);
7947 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
7948 Handle<JSFunction> target) {
7949 SharedFunctionInfo* shared = target->shared();
7950 if (shared->strict_mode() == SLOPPY && !shared->native()) {
7954 Handle<JSObject> global_receiver(
7955 target->context()->global_object()->global_receiver());
7956 return Add<HConstant>(global_receiver);
7958 return graph()->GetConstantUndefined();
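// Visits a call expression: property calls go through type feedback
// (monomorphic inlining, builtin and API fast paths, polymorphic dispatch);
// global and monomorphic function calls check the known target and pick the
// sloppy-mode implicit receiver; everything else becomes a generic call.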
7962 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
7963 ASSERT(!HasStackOverflow());
7966 Expression* callee = expr->expression();
7967 int argument_count = expr->arguments()->length() + 1;
7968 HInstruction* call = NULL;
7970 Property* prop = callee->AsProperty();
7973 HValue* receiver = Top();
7975 SmallMapList* types;
7976 ComputeReceiverTypes(expr, receiver, &types, zone());
7978 if (prop->key()->IsPropertyName() && types->length() > 0) {
7979 Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
7980 PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
7981 if (!info.CanAccessAsMonomorphic(types)) {
7982 HandlePolymorphicCallNamed(expr, receiver, types, name);
7988 if (!prop->key()->IsPropertyName()) {
7994 HValue* function = Pop();
8003 if (function->IsConstant() &&
8004 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8007 expr->set_target(known_function);
8009 if (TryCallApply(expr)) return;
8012 Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
8013 if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
8014 if (FLAG_trace_inlining) {
8015 PrintF("Inlining builtin ");
8016 known_function->ShortPrint();
8021 if (TryInlineApiMethodCall(expr, receiver, types)) return;
8024 if (NeedsWrappingFor(ToType(types->first()), known_function)) {
8030 call = New<HCallFunction>(
8032 } else if (TryInlineCall(expr)) {
8035 call = BuildCallConstantFunction(known_function, argument_count);
8042 call = New<HCallFunction>(function, argument_count, flags);
8047 VariableProxy* proxy = expr->expression()->AsVariableProxy();
8048 if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
8049 return Bailout(kPossibleDirectCallToEval);
8055 HValue* function = Top();
8056 bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
8058 Variable* var = proxy->var();
8059 bool known_global_function = false;
8063 LookupResult lookup(isolate());
8064 GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
8065 if (type == kUseCell &&
8066 !current_info()->global_object()->IsAccessCheckNeeded()) {
8067 Handle<GlobalObject> global(current_info()->global_object());
8068 known_global_function = expr->ComputeGlobalTarget(global, &lookup);
8070 if (known_global_function) {
8071 Add<HCheckValue>(function, expr->target());
8078 HValue* receiver = ImplicitReceiverFor(function, expr->target());
8079 const int receiver_index = argument_count - 1;
8080 environment()->SetExpressionStackAt(receiver_index, receiver);
8082 if (TryInlineBuiltinFunctionCall(expr)) {
8083 if (FLAG_trace_inlining) {
8084 PrintF("Inlining builtin ");
8085 expr->target()->ShortPrint();
8090 if (TryInlineApiFunctionCall(expr, receiver)) return;
8091 if (TryInlineCall(expr)) return;
8094 call = BuildCallConstantFunction(expr->target(), argument_count);
8099 call = New<HCallFunction>(function, argument_count);
8102 } else if (expr->IsMonomorphic()) {
8103 Add<HCheckValue>(function, expr->target());
8108 HValue* receiver = ImplicitReceiverFor(function, expr->target());
8109 const int receiver_index = argument_count - 1;
8110 environment()->SetExpressionStackAt(receiver_index, receiver);
8112 if (TryInlineBuiltinFunctionCall(expr)) {
8113 if (FLAG_trace_inlining) {
8114 PrintF("Inlining builtin ");
8115 expr->target()->ShortPrint();
8120 if (TryInlineApiFunctionCall(expr, receiver)) return;
8122 if (TryInlineCall(expr)) return;
8125 function, expr->target(), argument_count));
8131 call = New<HCallFunction>(function, argument_count);
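// Inline allocation of `new Array(...)`: reuse the AllocationSite's elements
// kind, allocate the backing store directly with JSArrayBuilder, and store
// any explicit constant-length or literal arguments.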
8140 void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
8141 NoObservableSideEffectsScope no_effects(this);
8143 int argument_count = expr->arguments()->length();
8145 HValue* constructor = environment()->ExpressionStackAt(argument_count);
8148 Handle<AllocationSite> site = expr->allocation_site();
8154 HInstruction* site_instruction = Add<HConstant>(site);
8160 if (argument->IsConstant()) {
8161 HConstant* constant_argument = HConstant::cast(argument);
8162 ASSERT(constant_argument->HasSmiValue());
8163 int constant_array_size = constant_argument->Integer32Value();
8164 if (constant_array_size != 0) {
8171 JSArrayBuilder array_builder(this,
8177 if (argument_count == 0) {
8178 new_object = array_builder.AllocateEmptyArray();
8179 } else if (argument_count == 1) {
8183 HValue* length = Add<HConstant>(argument_count);
8190 ? JSArrayBuilder::FILL_WITH_HOLE
8191 : JSArrayBuilder::DONT_FILL_WITH_HOLE;
8192 new_object = array_builder.AllocateArray(length, length, fill_mode);
8193 HValue* elements = array_builder.GetElementsLocation();
8194 for (int i = 0; i < argument_count; i++) {
8195 HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
8196 HValue* constant_i = Add<HConstant>(i);
8197 Add<HStoreKeyed>(elements, constant_i, value, kind);
8201 Drop(argument_count + 1);
8207 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
8208 return constructor->has_initial_map() &&
8210 constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
8211 constructor->initial_map()->InitialPropertiesLength() == 0;
8215 bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
8217 Handle<JSFunction> target(isolate()->native_context()->array_function(),
8219 int argument_count = expr->arguments()->length();
8222 Handle<AllocationSite> site = expr->allocation_site();
8225 bool inline_ok = false;
8226 if (site->CanInlineCall()) {
8228 if (argument_count == 1) {
8229 HValue* argument = Top();
8230 if (argument->IsConstant()) {
8233 HConstant* constant_argument = HConstant::cast(argument);
8234 if (constant_argument->HasSmiValue()) {
8235 int value = constant_argument->Integer32Value();
8236 inline_ok = value >= 0 &&
8239 TraceInline(target, caller,
8240 "Length outside of valid array range");
8250 TraceInline(target, caller, "AllocationSite requested no inlining.");
8254 TraceInline(target, caller, NULL);
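// `new` expressions: for a known constructor with a small initial map the
// receiver is allocated and initialized inline (optionally pretenured) before
// trying to inline the constructor body; Array constructors get a dedicated
// inline path, and everything else uses HCallNew/HCallNewArray.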
8260 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
8261 ASSERT(!HasStackOverflow());
8265 int argument_count = expr->arguments()->length() + 1;
8271 HValue* function = Top();
8274 if (FLAG_inline_construct &&
8275 expr->IsMonomorphic() &&
8276 IsAllocationInlineable(expr->target())) {
8277 Handle<JSFunction> constructor = expr->target();
8278 HValue* check = Add<HCheckValue>(function, constructor);
8282 if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
8283 constructor->shared()->CompleteInobjectSlackTracking();
8287 ASSERT(constructor->has_initial_map());
8288 Handle<Map> initial_map(constructor->initial_map());
8289 int instance_size = initial_map->instance_size();
8290 ASSERT(initial_map->InitialPropertiesLength() == 0);
8293 HValue* size_in_bytes = Add<HConstant>(instance_size);
8294 HAllocationMode allocation_mode;
8295 if (FLAG_pretenuring_call_new) {
8296 if (FLAG_allocation_site_pretenuring) {
8298 Handle<AllocationSite> allocation_site = expr->allocation_site();
8299 allocation_mode = HAllocationMode(allocation_site);
8305 allocation_mode = HAllocationMode(
8306 isolate()->heap()->GetPretenureMode());
8310 HAllocate* receiver =
8313 receiver->set_known_initial_map(initial_map);
8316 HValue* constructor_value = Add<HConstant>(constructor);
8317 HValue* initial_map_value =
8318 Add<HLoadNamedField>(constructor_value, static_cast<HValue*>(NULL),
8319 HObjectAccess::ForMapAndOffset(
8320 handle(constructor->map()),
8324 { NoObservableSideEffectsScope no_effects(this);
8326 Add<HStoreNamedField>(receiver,
8329 HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
8330 Add<HStoreNamedField>(receiver,
8331 HObjectAccess::ForMapAndOffset(initial_map,
8334 Add<HStoreNamedField>(receiver,
8335 HObjectAccess::ForMapAndOffset(initial_map,
8338 if (initial_map->inobject_properties() != 0) {
8339 HConstant* undefined = graph()->GetConstantUndefined();
8340 for (int i = 0; i < initial_map->inobject_properties(); i++) {
8341 int property_offset = initial_map->GetInObjectPropertyOffset(i);
8342 Add<HStoreNamedField>(receiver,
8343 HObjectAccess::ForMapAndOffset(initial_map, property_offset),
8351 const int receiver_index = argument_count - 1;
8353 environment()->SetExpressionStackAt(receiver_index, receiver);
8355 if (TryInlineConstruct(expr, receiver)) return;
8362 while (instr != initial_map_value) {
8363 HInstruction* prev_instr = instr->previous();
8364 instr->DeleteAndReplaceWith(NULL);
8367 initial_map_value->DeleteAndReplaceWith(NULL);
8368 receiver->DeleteAndReplaceWith(NULL);
8369 check->DeleteAndReplaceWith(NULL);
8370 environment()->SetExpressionStackAt(receiver_index, function);
8371 HInstruction* call =
8377 Handle<JSFunction> array_function(
8379 bool use_call_new_array = expr->target().is_identical_to(array_function);
8380 if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
8382 Add<HCheckValue>(function, array_function);
8383 BuildInlinedCallNewArray(expr);
8388 if (use_call_new_array) {
8389 Add<HCheckValue>(function, array_function);
8390 call = New<HCallNewArray>(function, argument_count,
8391 expr->elements_kind());
8393 call = New<HCallNew>(function, argument_count);
8406 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
8407 &HOptimizedGraphBuilder::Generate##Name,
8414 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
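// Shared initialization for JSArrayBufferView subtypes (typed arrays and data
// views): zero the internal fields, record byte offset and length, and either
// link the view into the buffer's weak view list or mark it as buffer-less.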
8417 template <class ViewClass>
8421 HValue* byte_offset,
8422 HValue* byte_length) {
8424 for (int offset = ViewClass::kSize;
8425 offset < ViewClass::kSizeWithInternalFields;
8427 Add<HStoreNamedField>(obj,
8428 HObjectAccess::ForObservableJSObjectOffset(offset),
8429 graph()->GetConstant0());
8432 Add<HStoreNamedField>(
8434 HObjectAccess::ForJSArrayBufferViewByteOffset(),
8436 Add<HStoreNamedField>(
8438 HObjectAccess::ForJSArrayBufferViewByteLength(),
8441 if (buffer != NULL) {
8442 Add<HStoreNamedField>(
8444 HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
8445 HObjectAccess weak_first_view_access =
8446 HObjectAccess::ForJSArrayBufferWeakFirstView();
8447 Add<HStoreNamedField>(obj,
8448 HObjectAccess::ForJSArrayBufferViewWeakNext(),
8449 Add<HLoadNamedField>(buffer,
8450 static_cast<HValue*>(NULL),
8451 weak_first_view_access));
8452 Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
8454 Add<HStoreNamedField>(
8456 HObjectAccess::ForJSArrayBufferViewBuffer(),
8457 Add<HConstant>(static_cast<int32_t>(0)));
8458 Add<HStoreNamedField>(obj,
8459 HObjectAccess::ForJSArrayBufferViewWeakNext(),
8460 graph()->GetConstantUndefined());
8465 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
8466 CallRuntime* expr) {
8469 NoObservableSideEffectsScope scope(this);
8470 ASSERT(arguments->length() == 4);
8472 HValue* obj = Pop();
8475 HValue* buffer = Pop();
8478 HValue* byte_offset = Pop();
8481 HValue* byte_length = Pop();
8483 BuildArrayBufferViewInitialization<JSDataView>(
8484 obj, buffer, byte_offset, byte_length);
8493 switch (array_type) {
8494 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
8495 case kExternal##Type##Array: \
8496 fun = Handle<JSFunction>(native_context->type##_array_fun()); \
8500 #undef TYPED_ARRAY_CASE
8502 Handle<Map> map(fun->initial_map());
8507 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
8509 bool is_zero_byte_offset,
8510 HValue* buffer, HValue* byte_offset, HValue* length) {
8511 Handle<Map> external_array_map(
8512 isolate()->heap()->MapForExternalArrayType(array_type));
8518 external_array_map->instance_type());
8522 HValue* backing_store = Add<HLoadNamedField>(
8523 buffer, static_cast<HValue*>(NULL),
8524 HObjectAccess::ForJSArrayBufferBackingStore());
8526 HValue* typed_array_start;
8527 if (is_zero_byte_offset) {
8528 typed_array_start = backing_store;
8530 HInstruction* external_pointer =
8531 AddUncasted<HAdd>(backing_store, byte_offset);
8535 typed_array_start = external_pointer;
8539 Add<HStoreNamedField>(elements,
8540 HObjectAccess::ForExternalArrayExternalPointer(),
8543 Add<HStoreNamedField>(elements,
8544 HObjectAccess::ForFixedArrayLength(), length);
8549 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
8552 HValue* byte_length, HValue* length) {
8563 total_size = AddUncasted<HAdd>(byte_length,
8568 Handle<Map> fixed_typed_array_map(
8569 isolate()->heap()->MapForFixedTypedArray(array_type));
8571 Add<HAllocate>(total_size, HType::Tagged(),
8573 fixed_typed_array_map->instance_type());
8576 Add<HStoreNamedField>(elements,
8577 HObjectAccess::ForFixedArrayLength(),
8579 HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
8582 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
8584 HValue* key = builder.BeginBody(
8585 Add<HConstant>(static_cast<int32_t>(0)),
8587 Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
8591 Add<HStoreNamedField>(
8592 elements, HObjectAccess::ForFixedArrayLength(), length);
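// %TypedArrayInitialize: validates the (object, arrayId, buffer, byteOffset,
// byteLength) arguments, initializes the view fields, and allocates either
// external elements backed by the ArrayBuffer or an on-heap fixed typed
// array, falling back to the runtime for non-Smi byte offsets.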
8597 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
8598 CallRuntime* expr) {
8599 ZoneList<Expression*>* arguments = expr->arguments();
8601 NoObservableSideEffectsScope scope(this);
8602 static const int kObjectArg = 0;
8603 static const int kArrayIdArg = 1;
8604 static const int kBufferArg = 2;
8605 static const int kByteOffsetArg = 3;
8606 static const int kByteLengthArg = 4;
8607 static const int kArgsLength = 5;
8608 ASSERT(arguments->length() == kArgsLength);
8612 HValue* obj =
Pop();
8614 ASSERT(arguments->at(kArrayIdArg)->node_type() == AstNode::kLiteral);
8615 Handle<Object> value =
8616 static_cast<Literal*
>(arguments->at(kArrayIdArg))->value();
8621 if (!arguments->at(kBufferArg)->IsNullLiteral()) {
8628 HValue* byte_offset;
8629 bool is_zero_byte_offset;
8631 if (arguments->at(kByteOffsetArg)->node_type() == AstNode::kLiteral
8633 *
static_cast<Literal*
>(arguments->at(kByteOffsetArg))->value()) {
8634 byte_offset = Add<HConstant>(
static_cast<int32_t>(0));
8635 is_zero_byte_offset =
true;
8638 byte_offset =
Pop();
8639 is_zero_byte_offset =
false;
8644 HValue* byte_length =
Pop();
8646 IfBuilder byte_offset_smi(
this);
  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  }

  size_t element_size = 1;
  // ... (the array id literal is decoded into array_type, element kinds
  //      and element_size; the decoding call is elided in this listing)
      &external_elements_kind,
      &fixed_elements_kind,

  BuildArrayBufferViewInitialization<JSTypedArray>(
      obj, buffer, byte_offset, byte_length);

  HInstruction* length = AddUncasted<HDiv>(byte_length,
      Add<HConstant>(static_cast<int32_t>(element_size)));

  Add<HStoreNamedField>(obj,
      HObjectAccess::ForJSTypedArrayLength(),
      length);

  if (buffer != NULL) {
    elements = BuildAllocateExternalElements(
        array_type, is_zero_byte_offset, buffer, byte_offset, length);
    Handle<Map> obj_map = TypedArrayMap(
        isolate(), array_type, external_elements_kind);
    // ...
  } else {
    ASSERT(is_zero_byte_offset);
    elements = BuildAllocateFixedTypedArray(
        array_type, element_size, fixed_elements_kind,
        byte_length, length);
  }
  Add<HStoreNamedField>(
      obj, HObjectAccess::ForElementsPointer(), elements);

  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    // ...
    Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
  }
  byte_offset_smi.End();
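// %TypedArrayInitialize takes (obj, arrayId, buffer, byteOffset, byteLength).
// Two allocation paths are inlined above: when a buffer is passed, the view is
// backed by external elements pointing into the buffer's backing store; when
// buffer is null, an on-heap fixed typed array is allocated and zero-filled.
// The byte_offset smi check guards the fast path; if it fails, the builder
// falls back to the full runtime call via Add<HCallRuntime>.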
void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);


void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
    CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  HConstant* result = New<HConstant>(static_cast<int32_t>(
      FLAG_typed_array_max_size_in_heap));
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  ASSERT(!HasStackOverflow());
  if (expr->is_jsruntime()) {
    return Bailout(kCallToAJavaScriptRuntimeFunction);
  }

  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
    ASSERT(expr->name()->length() > 0);
    ASSERT(expr->name()->Get(0) == '_');
    int lookup_index = static_cast<int>(function->function_id) -  // ...
    ASSERT(lookup_index >= 0);
    ASSERT(static_cast<size_t>(lookup_index) <  // ...
    (this->*generator)(expr);

    Handle<String> name = expr->name();
    int argument_count = expr->arguments()->length();
    HCallRuntime* call = New<HCallRuntime>(name, function, argument_count);
void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
  ASSERT(!HasStackOverflow());
  switch (expr->op()) {
  // ...

  Property* prop = expr->expression()->AsProperty();
  VariableProxy* proxy = expr->expression()->AsVariableProxy();
    HValue* key = Pop();
    HValue* obj = Pop();
    Add<HPushArgument>(obj);
    Add<HPushArgument>(key);
    HInstruction* instr = New<HInvokeFunction>(function, 3);
  } else if (proxy != NULL) {
      Bailout(kDeleteWithGlobalVariable);
      HValue* value = var->is_this()
          ? graph()->GetConstantTrue()
          : graph()->GetConstantFalse();
      Bailout(kDeleteWithNonGlobalVariable);
  // ...

  HValue* value = Pop();
    TestContext* context = TestContext::cast(ast_context());
                      context->if_false(),
                      context->if_true());
  // ...

    HBasicBlock* materialize_false = graph()->CreateBasicBlock();
    HBasicBlock* materialize_true = graph()->CreateBasicBlock();

    if (materialize_false->HasPredecessor()) {
      materialize_false->SetJoinId(expr->MaterializeFalseId());
      // ...
    } else {
      materialize_false = NULL;
    }
    if (materialize_true->HasPredecessor()) {
      materialize_true->SetJoinId(expr->MaterializeTrueId());
      // ...
    } else {
      materialize_true = NULL;
    }

    CreateJoin(materialize_false, materialize_true, expr->id());
    bool returns_original_input,
    CountOperation* expr) {
  if (returns_original_input) {
    HInstruction* number_input =
        AddUncasted<HForceRepresentation>(Pop(), rep);

  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
  if (instr->IsAdd()) {
    HAdd* add = HAdd::cast(instr);
    add->set_observed_input_representation(1, rep);
    // ...
  }
  instr->ClearAllSideEffects();


void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
                                                 BailoutId return_id,
  EffectContext for_effect(this);
  if (key != NULL) Push(key);
  BuildStore(expr, prop, ast_id, return_id);
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  ASSERT(!HasStackOverflow());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  bool returns_original_input =  // ...
  HValue* input = NULL;
  HValue* after = NULL;

  if (proxy != NULL) {
    Variable* var = proxy->var();
      return Bailout(kUnsupportedCountOperationWithConst);

    after = BuildIncrement(returns_original_input, expr);
    input = returns_original_input ? Top() : Pop();

    switch (var->location()) {
        HandleGlobalVariableAssignment(var,
                                       expr->AssignmentId());
        for (int i = 0; i < count; ++i) {
            return Bailout(kAssignmentToParameterInArgumentsObject);
        HValue* context = BuildContextChainWalk(var);
            ? HStoreContextSlot::kCheckDeoptimize
            : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr =
            Add<HStoreContextSlot>(context, var->index(),  // ...
        if (instr->HasObservableSideEffects()) {
        return Bailout(kLookupVariableInCountOperation);
    }
    Drop(returns_original_input ? 2 : 1);

  if (returns_original_input) Push(graph()->GetConstantUndefined());

  HValue* object = Top();
  if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
      prop->IsStringAccess()) {
  after = BuildIncrement(returns_original_input, expr);
  if (returns_original_input) {
    Drop(key == NULL ? 1 : 2);
      expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
  return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(  // ...
  if (string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasNumberValue()) {
      int32_t i = c_index->NumberValueAsInteger32();
      Handle<String> s = c_string->StringValue();
      if (i < 0 || i >= s->length()) {
        // ...
      }
      return New<HConstant>(s->Get(i));
    }
  }
  return New<HStringCharCodeAt>(string, index);
}


static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
                                             HValue* const32_minus_sa) {
  if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
    const HConstant* c1 = HConstant::cast(sa);
    const HConstant* c2 = HConstant::cast(const32_minus_sa);
    return c1->HasInteger32Value() && c2->HasInteger32Value() &&
        (c1->Integer32Value() + c2->Integer32Value() == 32);
  }
  if (!const32_minus_sa->IsSub()) return false;
  HSub* sub = HSub::cast(const32_minus_sa);
  return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
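// The rotate-matching helpers (ShiftAmountsAllowReplaceByRotate and the
// MatchRotateRight helper below, name as in the full source) let a pair of
// shifts combined with `|` be strength-reduced to a single rotate. A sketch of
// the JavaScript pattern this recognizes (example only, not from this file):
//
//   function rotr(x, n) { return (x >>> n) | (x << (32 - n)); }
//
// Both shift amounts must either be constants summing to 32, or one must
// literally be the expression (32 - sa) built from the other's sa.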
                             HValue** shift_amount) {
  if (left->IsShl() && right->IsShr()) {
    shl = HShl::cast(left);
    shr = HShr::cast(right);
  } else if (left->IsShr() && right->IsShl()) {
    shl = HShl::cast(right);
    shr = HShr::cast(left);
  }
  if (shl->left() != shr->left()) return false;
  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
      !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
    return false;
  }
  *operand = shr->left();
  *shift_amount = shr->right();

  // ...
  if (right->IsConstant()) {
    HConstant* right_const = HConstant::cast(right);
    if (right_const->HasInteger32Value() &&
        (right_const->Integer32Value() & 0x1f) != 0) {

  // ...
  if (expected->Is(Type::SignedSmall())) {
  if (expected->Is(Type::Signed32())) {
    return AddUncasted<HForceRepresentation>(number,  // ...

  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    *expected = Type::Number(zone());

  NoObservableSideEffectsScope no_effects(this);
  Type* expected_type = *expected;
  Type* expected_obj =  // ...
  Type* expected_number =  // ...
HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
    BinaryOperation* expr,
    PushBeforeSimulateBehavior push_sim_result) {
  Type* left_type = expr->left()->bounds().lower;
  Type* right_type = expr->right()->bounds().lower;
  Type* result_type = expr->bounds().lower;
  Maybe<int> fixed_right_arg = expr->fixed_right_arg();

  PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?  // ...
  HAllocationMode allocation_mode =
      FLAG_allocation_site_pretenuring
          ? HAllocationMode(NOT_TENURED)
          : HAllocationMode(allocation_site))
      : HAllocationMode(pretenure_flag);

      expr->op(), left, right, left_type, right_type, result_type,
      fixed_right_arg, allocation_mode);

  if (push_sim_result == PUSH_BEFORE_SIMULATE) {
    HAllocationMode allocation_mode) {
      (left_type->Maybe(Type::String()) ||
       right_type->Maybe(Type::String()));

    Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
    left_type = Type::Any(zone());

    Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
    right_type = Type::Any(zone());

      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
    if (left_type->Is(Type::String())) {
    if (right_type->Is(Type::String())) {

      if (left_type->Is(Type::Number())) {
        ASSERT(right_type->Is(Type::String()));
      } else if (!left_type->Is(Type::String())) {
        ASSERT(right_type->Is(Type::String()));
        Add<HPushArgument>(left);
        Add<HPushArgument>(right);
        return AddUncasted<HInvokeFunction>(function, 2);
      }

      if (right_type->Is(Type::Number())) {
        ASSERT(left_type->Is(Type::String()));
      } else if (!right_type->Is(Type::String())) {
        ASSERT(left_type->Is(Type::String()));
        Add<HPushArgument>(left);
        Add<HPushArgument>(right);
        return AddUncasted<HInvokeFunction>(function, 2);
      }
    if (left->IsConstant() &&
        HConstant::cast(left)->HasStringValue() &&
        HConstant::cast(left)->StringValue()->length() == 0) {
    if (right->IsConstant() &&
        HConstant::cast(right)->HasStringValue() &&
        HConstant::cast(right)->StringValue()->length() == 0) {

    if (!allocation_mode.feedback_site().is_null()) {
         allocation_mode.CreateAllocationMementos()) ||
        (left->IsConstant() &&
         HConstant::cast(left)->HasStringValue() &&
         HConstant::cast(left)->StringValue()->length() + 1 >=  // ...
        (right->IsConstant() &&
         HConstant::cast(right)->HasStringValue() &&
         HConstant::cast(right)->StringValue()->length() + 1 >=  // ...

    return AddUncasted<HStringAdd>(
        left, right, allocation_mode.GetPretenureMode(),  // ...

  if (graph()->info()->IsStub()) {
  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||  // ...
  if (graph()->info()->IsStub() && is_non_primitive) {
    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    instr = AddUncasted<HInvokeFunction>(function, 2);
  } else {
    // switch (op) — the ADD/SUB/MUL/MOD/DIV and bitwise/shift case labels are
    // elided in this listing; each case produces the instruction shown below.
      instr = AddUncasted<HAdd>(left, right);
      instr = AddUncasted<HSub>(left, right);
      instr = AddUncasted<HMul>(left, right);
        HConstant* fixed_right = Add<HConstant>(
            static_cast<int>(fixed_right_arg.value));
        IfBuilder if_same(this);
        if_same.ElseDeopt("Unexpected RHS of binary operation");
        right = fixed_right;
      instr = AddUncasted<HMod>(left, right);
      instr = AddUncasted<HDiv>(left, right);
    case Token::BIT_XOR:
    case Token::BIT_AND:
      instr = AddUncasted<HBitwise>(op, left, right);
    case Token::BIT_OR: {
      HValue* operand, *shift_amount;
      if (left_type->Is(Type::Signed32()) &&
          right_type->Is(Type::Signed32()) &&  // ...
        instr = AddUncasted<HRor>(operand, shift_amount);
        instr = AddUncasted<HBitwise>(op, left, right);
      instr = AddUncasted<HSar>(left, right);
      instr = AddUncasted<HShr>(left, right);
      if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&  // ...
        graph()->RecordUint32Instruction(instr);
      instr = AddUncasted<HShl>(left, right);

  if (instr->IsBinaryOperation()) {
  if (left->IsForceRepresentation()) {
  if (right->IsForceRepresentation()) {
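// Summary of the lowering above: ADD may become HStringAdd (string operands),
// an HInvokeFunction of the generic builtin (non-primitive operands in stub
// graphs), or a plain HAdd; MOD with a known fixed right argument deoptimizes
// unless the RHS matches that constant; BIT_OR of two matched shifts becomes
// HRor; and SHR results are recorded as safe uint32 instructions when
// --opt-safe-uint32-operations permits it.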
static bool IsClassOfTest(CompareOperation* expr) {
  if (expr->op() != Token::EQ_STRICT) return false;
  CallRuntime* call = expr->left()->AsCallRuntime();
  if (call == NULL) return false;
  Literal* literal = expr->right()->AsLiteral();
  if (literal == NULL) return false;
  if (!literal->value()->IsString()) return false;
  ASSERT(call->arguments()->length() == 1);
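// IsClassOfTest recognizes the class-check pattern used by the JS builtins,
// roughly (natives syntax, sketch only):
//
//   %_ClassOf(obj) === 'Date'
//
// i.e. a strict equality whose left side is the _ClassOf inline runtime call
// and whose right side is a string literal. VisitCompareOperation then lowers
// the whole comparison to a single HClassOfTestAndBranch instead of a generic
// compare.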
9454 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
9455 ASSERT(!HasStackOverflow());
9458 switch (expr->op()) {
9474 Visit(expr->right());
9479 bool is_logical_and = expr->op() ==
Token::AND;
9481 TestContext* context = TestContext::cast(
ast_context());
9483 HBasicBlock* eval_right =
graph()->CreateBasicBlock();
9484 if (is_logical_and) {
9487 context->if_false()));
9496 if (eval_right->HasPredecessor()) {
9497 eval_right->SetJoinId(expr->RightId());
9499 Visit(expr->right());
9505 HValue* left_value =
Top();
9508 if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
9513 if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
9521 HBasicBlock* empty_block =
graph()->CreateBasicBlock();
9522 HBasicBlock* eval_right =
graph()->CreateBasicBlock();
9524 HBranch* test = is_logical_and
9525 ? New<HBranch>(left_value, expected, eval_right, empty_block)
9526 : New<HBranch>(left_value, expected, empty_block, eval_right);
9533 HBasicBlock* join_block =
9543 HBasicBlock* empty_block =
graph()->CreateBasicBlock();
9544 HBasicBlock* right_block =
graph()->CreateBasicBlock();
9545 if (is_logical_and) {
9557 if (empty_block->HasPredecessor()) {
9558 empty_block->SetJoinId(expr->id());
9563 if (right_block->HasPredecessor()) {
9564 right_block->SetJoinId(expr->RightId());
9572 HBasicBlock* join_block =
9573 CreateJoin(empty_block, right_block, expr->id());
9585 HValue* right =
Pop();
9586 HValue* left =
Pop();
9588 BuildBinaryOperation(expr, left, right,
9589 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
9590 : PUSH_BEFORE_SIMULATE);
9591 if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
  HValue* value = Pop();
  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);


static bool IsLiteralCompareBool(Isolate* isolate,
  return op == Token::EQ_STRICT &&
      ((left->IsConstant() &&
        HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
       (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate)->IsBoolean()));
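// IsLiteralCompareBool complements the other literal-compare fast paths used
// below: `typeof x === "t"`, `x === undefined`, `x === null`, and comparisons
// against boolean constants all skip the generic compare and become dedicated
// branch instructions (HTypeofIsAndBranch, HCompareObjectEqAndBranch, ...).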
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  ASSERT(!HasStackOverflow());

  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    ASSERT(call->arguments()->length() == 1);
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
  }

  Type* left_type = expr->left()->bounds().lower;
  Type* right_type = expr->right()->bounds().lower;
  Type* combined_type = expr->combined_type();

  HValue* right = Pop();
  HValue* left = Pop();

  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
  }

  if (op == Token::INSTANCEOF) {
    VariableProxy* proxy = expr->right()->AsVariableProxy();
    bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
    if (global_function &&
        !current_info()->global_object()->IsAccessCheckNeeded()) {
      Handle<String> name = proxy->name();
      Handle<GlobalObject> global(current_info()->global_object());
      LookupResult lookup(isolate());
      global->Lookup(*name, &lookup);
      if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
        if (!isolate()->heap()->InNewSpace(*candidate)) {

    if (target.is_null()) {
      HInstanceOf* result = New<HInstanceOf>(left, right);
      Add<HCheckValue>(right, target);
      HInstanceOfKnownGlobal* result =
          New<HInstanceOfKnownGlobal>(left, target);

    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    HInstruction* result = New<HInvokeFunction>(function, 2);

  PushBeforeSimulateBehavior push_behavior =  // ...
      : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      push_behavior, expr->id());
  if (compare == NULL) return;
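// For `x instanceof F` where F resolves to a global function that is not in
// new space, the builder emits HCheckValue(right, F) followed by
// HInstanceOfKnownGlobal, so the prototype-chain walk can be specialized to
// that known closure; any other shape falls back to the generic HInstanceOf.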
HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
    Type* combined_type,
    HSourcePosition left_position,
    HSourcePosition right_position,
    PushBeforeSimulateBehavior push_sim_result,
    BailoutId bailout_id) {
    Add<HDeoptimize>("Insufficient type feedback for combined type "
                     "of binary operation",
    combined_type = left_type = right_type = Type::Any(zone());

  if (combined_type->Is(Type::Receiver())) {
      HValue* operand_to_check =
          left->block()->block_id() < right->block()->block_id() ? left : right;
      if (combined_type->IsClass()) {
        Handle<Map> map = combined_type->AsClass();
        AddCheckMap(operand_to_check, map);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        if (FLAG_hydrogen_track_positions) {
          result->set_operand_position(zone(), 0, left_position);
          result->set_operand_position(zone(), 1, right_position);
        }
      } else {
        Add<HCheckInstanceType>(operand_to_check,
                                HCheckInstanceType::IS_SPEC_OBJECT);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
      }
      Bailout(kUnsupportedNonPrimitiveCompare);
  } else if (combined_type->Is(Type::InternalizedString()) &&  // ...
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
  } else if (combined_type->Is(Type::String())) {
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      if (result->HasObservableSideEffects()) {
        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
      HBranch* branch = New<HBranch>(result);
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      if (FLAG_hydrogen_track_positions) {
        result->SetOperandPositions(zone(), left_position, right_position);
      }
void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
                                                     Expression* sub_expr,
  ASSERT(!HasStackOverflow());
  HValue* value = Pop();
  if (expr->op() == Token::EQ_STRICT) {
        ? graph()->GetConstantNull()
        : graph()->GetConstantUndefined();
    HCompareObjectEqAndBranch* instr =
        New<HCompareObjectEqAndBranch>(value, nil_constant);
        ? Type::Any(zone()) : expr->combined_type();
    HIfContinuation continuation;
HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
    return New<HConstant>(  // ...
  return New<HThisFunction>();
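// BuildThisFunction: while the builder is inlining, the current closure is a
// compile-time constant and materializes as an HConstant; only the outermost
// frame needs the dynamic HThisFunction instruction.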
9875 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
9876 Handle<JSObject> boilerplate_object,
9877 AllocationSiteUsageContext* site_context) {
9878 NoObservableSideEffectsScope no_effects(
this);
9879 InstanceType instance_type = boilerplate_object->map()->instance_type();
9883 ? HType::JSArray() : HType::JSObject();
9884 HValue* object_size_constant = Add<HConstant>(
9885 boilerplate_object->map()->instance_size());
9888 if (FLAG_allocation_site_pretenuring) {
9889 pretenure_flag = site_context->current()->GetPretenureMode();
9890 Handle<AllocationSite> site(site_context->current());
9895 HInstruction*
object = Add<HAllocate>(object_size_constant, type,
9896 pretenure_flag, instance_type, site_context->current());
9902 HConstant* empty_fixed_array = Add<HConstant>(
9904 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
9907 BuildEmitObjectHeader(boilerplate_object,
object);
9909 Handle<FixedArrayBase> elements(boilerplate_object->elements());
9910 int elements_size = (elements->length() > 0 &&
9911 elements->map() !=
isolate()->
heap()->fixed_cow_array_map()) ?
9912 elements->Size() : 0;
9914 if (pretenure_flag ==
TENURED &&
9915 elements->map() ==
isolate()->
heap()->fixed_cow_array_map() &&
9920 elements = Handle<FixedArrayBase>(
9922 Handle<FixedArray>::cast(elements)));
9923 boilerplate_object->set_elements(*elements);
9926 HInstruction* object_elements =
NULL;
9927 if (elements_size > 0) {
9928 HValue* object_elements_size = Add<HConstant>(elements_size);
9929 if (boilerplate_object->HasFastDoubleElements()) {
9930 object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
9933 object_elements = Add<HAllocate>(object_elements_size, HType::Tagged(),
9937 BuildInitElementsInObjectHeader(boilerplate_object,
object, object_elements);
9940 if (object_elements != NULL) {
9941 BuildEmitElements(boilerplate_object, elements, object_elements,
9946 if (boilerplate_object->map()->NumberOfFields() != 0) {
9947 BuildEmitInObjectProperties(boilerplate_object,
object, site_context,
9954 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
9955 Handle<JSObject> boilerplate_object,
9956 HInstruction*
object) {
9957 ASSERT(boilerplate_object->properties()->length() == 0);
9959 Handle<Map> boilerplate_object_map(boilerplate_object->map());
9962 Handle<Object> properties_field =
9963 Handle<Object>(boilerplate_object->properties(),
isolate());
9964 ASSERT(*properties_field ==
isolate()->heap()->empty_fixed_array());
9965 HInstruction* properties = Add<HConstant>(properties_field);
9966 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
9967 Add<HStoreNamedField>(object, access, properties);
9969 if (boilerplate_object->IsJSArray()) {
9970 Handle<JSArray> boilerplate_array =
9972 Handle<Object> length_field =
9973 Handle<Object>(boilerplate_array->length(),
isolate());
9974 HInstruction* length = Add<HConstant>(length_field);
9976 ASSERT(boilerplate_array->length()->IsSmi());
9977 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
9978 boilerplate_array->GetElementsKind()), length);
9983 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
9984 Handle<JSObject> boilerplate_object,
9985 HInstruction*
object,
9986 HInstruction* object_elements) {
9987 ASSERT(boilerplate_object->properties()->length() == 0);
9988 if (object_elements == NULL) {
9989 Handle<Object> elements_field =
9990 Handle<Object>(boilerplate_object->elements(),
isolate());
9991 object_elements = Add<HConstant>(elements_field);
9993 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
9998 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
9999 Handle<JSObject> boilerplate_object,
10000 HInstruction*
object,
10001 AllocationSiteUsageContext* site_context,
10003 Handle<Map> boilerplate_map(boilerplate_object->map());
10004 Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
10005 int limit = boilerplate_map->NumberOfOwnDescriptors();
10007 int copied_fields = 0;
10008 for (
int i = 0; i < limit; i++) {
10009 PropertyDetails details = descriptors->GetDetails(i);
10010 if (details.type() !=
FIELD)
continue;
10012 int index = descriptors->GetFieldIndex(i);
10013 int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
10014 Handle<Name>
name(descriptors->GetKey(i));
10015 Handle<Object> value =
10016 Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
10020 HObjectAccess access = boilerplate_object->IsJSArray() ?
10021 HObjectAccess::ForJSArrayOffset(property_offset) :
10022 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10024 if (value->IsJSObject()) {
10026 Handle<AllocationSite> current_site = site_context->EnterNewScope();
10027 HInstruction* result =
10028 BuildFastLiteral(value_object, site_context);
10029 site_context->ExitScope(current_site, value_object);
10030 Add<HStoreNamedField>(object, access, result);
10032 Representation representation = details.representation();
10033 HInstruction* value_instruction;
10035 if (representation.IsDouble()) {
10042 HInstruction* double_box =
10043 Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
10046 isolate()->factory()->heap_number_map());
10047 Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
10048 Add<HConstant>(value));
10049 value_instruction = double_box;
10050 }
else if (representation.IsSmi()) {
10051 value_instruction = value->IsUninitialized()
10052 ?
graph()->GetConstant0()
10053 : Add<HConstant>(value);
10055 access = access.WithRepresentation(representation);
10057 value_instruction = Add<HConstant>(value);
10060 Add<HStoreNamedField>(object, access, value_instruction);
10064 int inobject_properties = boilerplate_object->map()->inobject_properties();
10065 HInstruction* value_instruction =
10067 for (
int i = copied_fields; i < inobject_properties; i++) {
10068 ASSERT(boilerplate_object->IsJSObject());
10069 int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
10070 HObjectAccess access =
10071 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
10072 Add<HStoreNamedField>(object, access, value_instruction);
10077 void HOptimizedGraphBuilder::BuildEmitElements(
10078 Handle<JSObject> boilerplate_object,
10079 Handle<FixedArrayBase> elements,
10080 HValue* object_elements,
10081 AllocationSiteUsageContext* site_context) {
10082 ElementsKind kind = boilerplate_object->map()->elements_kind();
10083 int elements_length = elements->length();
10084 HValue* object_elements_length = Add<HConstant>(elements_length);
10088 if (elements->IsFixedDoubleArray()) {
10089 BuildEmitFixedDoubleArray(elements, kind, object_elements);
10090 }
else if (elements->IsFixedArray()) {
10091 BuildEmitFixedArray(elements, kind, object_elements,
void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
    Handle<FixedArrayBase> elements,
    HValue* object_elements) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  for (int i = 0; i < elements_length; i++) {
    HValue* key_constant = Add<HConstant>(i);
    HInstruction* value_instruction =
        Add<HLoadKeyed>(boilerplate_elements, key_constant,
                        static_cast<HValue*>(NULL), kind,  // ...
    HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
                                           value_instruction, kind);


void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();

  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, kind);
    } else {
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant,
                          static_cast<HValue*>(NULL), kind,  // ...
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
    }
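// Literal boilerplates are copied field by field at compile time:
// BuildEmitFixedDoubleArray re-emits every double element with an
// HLoadKeyed/HStoreKeyed pair, while BuildEmitFixedArray recurses through
// BuildFastLiteral for nested JSObject elements (tracking allocation sites via
// the AllocationSiteUsageContext) and copies plain values directly.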
10147 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
10148 ASSERT(!HasStackOverflow());
10151 HInstruction* instr = BuildThisFunction();
10158 ASSERT(globals_.is_empty());
10159 AstVisitor::VisitDeclarations(declarations);
10160 if (!globals_.is_empty()) {
10163 for (
int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
10167 Add<HDeclareGlobals>(array,
flags);
10173 void HOptimizedGraphBuilder::VisitVariableDeclaration(
10174 VariableDeclaration* declaration) {
10175 VariableProxy* proxy = declaration->proxy();
10177 Variable* variable = proxy->var();
10181 globals_.Add(variable->
name(),
zone());
10189 HValue* value =
graph()->GetConstantHole();
10195 HValue* value =
graph()->GetConstantHole();
10197 HStoreContextSlot* store = Add<HStoreContextSlot>(
10198 context, variable->
index(), HStoreContextSlot::kNoCheck, value);
10199 if (store->HasObservableSideEffects()) {
10205 return Bailout(kUnsupportedLookupSlotInDeclaration);
10210 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
10211 FunctionDeclaration* declaration) {
10212 VariableProxy* proxy = declaration->proxy();
10213 Variable* variable = proxy->var();
10214 switch (variable->location()) {
10216 globals_.Add(variable->name(),
zone());
10220 if (
function.is_null())
return SetStackOverflow();
10221 globals_.Add(
function,
zone());
10227 HValue* value =
Pop();
10233 HValue* value =
Pop();
10235 HStoreContextSlot* store = Add<HStoreContextSlot>(
10236 context, variable->index(), HStoreContextSlot::kNoCheck, value);
10237 if (store->HasObservableSideEffects()) {
10243 return Bailout(kUnsupportedLookupSlotInDeclaration);
10248 void HOptimizedGraphBuilder::VisitModuleDeclaration(
10249 ModuleDeclaration* declaration) {
10254 void HOptimizedGraphBuilder::VisitImportDeclaration(
10255 ImportDeclaration* declaration) {
10260 void HOptimizedGraphBuilder::VisitExportDeclaration(
10261 ExportDeclaration* declaration) {
10266 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
10271 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
10276 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
10281 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
10286 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
10293 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
10294 ASSERT(call->arguments()->length() == 1);
10296 HValue* value =
Pop();
10297 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
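// The Generate* functions in this section inline the %_ intrinsics used by the
// JS builtins. For instance `%_IsSmi(x)` (natives syntax, sketch only) becomes
// a single HIsSmiAndBranch; the surrounding AST context then decides whether
// the branch materializes true/false values or feeds a test context directly.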
10302 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
10303 ASSERT(call->arguments()->length() == 1);
10305 HValue* value =
Pop();
10306 HHasInstanceTypeAndBranch* result =
10307 New<HHasInstanceTypeAndBranch>(value,
10314 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
10315 ASSERT(call->arguments()->length() == 1);
10317 HValue* value =
Pop();
10318 HHasInstanceTypeAndBranch* result =
10324 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
10325 ASSERT(call->arguments()->length() == 1);
10327 HValue* value =
Pop();
10328 HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
10333 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
10334 ASSERT(call->arguments()->length() == 1);
10336 HValue* value =
Pop();
10337 HHasCachedArrayIndexAndBranch* result =
10338 New<HHasCachedArrayIndexAndBranch>(value);
10343 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
10344 ASSERT(call->arguments()->length() == 1);
10346 HValue* value =
Pop();
10347 HHasInstanceTypeAndBranch* result =
10353 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
10354 ASSERT(call->arguments()->length() == 1);
10356 HValue* value =
Pop();
10357 HHasInstanceTypeAndBranch* result =
10363 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
10364 ASSERT(call->arguments()->length() == 1);
10366 HValue* value =
Pop();
10367 HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
10372 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
10373 return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
10377 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
10378 ASSERT(call->arguments()->length() == 1);
10380 HValue* value =
Pop();
10381 HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
10386 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
10387 CallRuntime* call) {
10388 return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
10393 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
10394 ASSERT(call->arguments()->length() == 0);
10398 ?
graph()->GetConstantTrue()
10399 :
graph()->GetConstantFalse();
10409 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
10414 ASSERT(call->arguments()->length() == 0);
10415 HInstruction* elements = Add<HArgumentsElements>(
false);
10416 HArgumentsLength* result = New<HArgumentsLength>(elements);
10421 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
10426 ASSERT(call->arguments()->length() == 1);
10428 HValue* index =
Pop();
10429 HInstruction* elements = Add<HArgumentsElements>(
false);
10430 HInstruction* length = Add<HArgumentsLength>(elements);
10431 HInstruction* checked_index = Add<HBoundsCheck>(index, length);
10432 HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
10433 elements, length, checked_index);
10439 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
10442 return Bailout(kInlinedRuntimeFunctionClassOf);
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  HValue* object = Pop();

  IfBuilder if_objectisvalue(this);
  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
  if_objectisvalue.Then();
    Push(Add<HLoadNamedField>(
        object, objectisvalue,
        HObjectAccess::ForObservableJSObjectOffset(
  if_objectisvalue.Else();
  if_objectisvalue.End();
10474 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
10475 ASSERT(call->arguments()->length() == 2);
10476 ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
10477 Smi* index =
Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
10479 HValue* date =
Pop();
10480 HDateField* result = New<HDateField>(date, index);
10485 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
10486 CallRuntime* call) {
10487 ASSERT(call->arguments()->length() == 3);
10492 HValue*
string =
Pop();
10493 HValue* value =
Pop();
10494 HValue* index =
Pop();
10502 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
10503 CallRuntime* call) {
10504 ASSERT(call->arguments()->length() == 3);
10509 HValue*
string =
Pop();
10510 HValue* value =
Pop();
10511 HValue* index =
Pop();
10519 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
10520 ASSERT(call->arguments()->length() == 2);
10523 HValue* value =
Pop();
10524 HValue*
object =
Pop();
10527 IfBuilder if_objectisvalue(
this);
10528 if_objectisvalue.If<HHasInstanceTypeAndBranch>(object,
JS_VALUE_TYPE);
10529 if_objectisvalue.Then();
10532 Add<HStoreNamedField>(object,
10540 if_objectisvalue.Else();
10548 if_objectisvalue.End();
10557 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
10558 ASSERT(call->arguments()->length() == 2);
10561 HValue* index =
Pop();
10562 HValue*
string =
Pop();
10563 HInstruction* result = BuildStringCharCodeAt(
string, index);
10569 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
10570 ASSERT(call->arguments()->length() == 1);
10572 HValue* char_code =
Pop();
10573 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
10579 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
10580 ASSERT(call->arguments()->length() == 2);
10583 HValue* index =
Pop();
10584 HValue*
string =
Pop();
10585 HInstruction* char_code = BuildStringCharCodeAt(
string, index);
10587 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
10593 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
10594 ASSERT(call->arguments()->length() == 2);
10597 HValue* right =
Pop();
10598 HValue* left =
Pop();
10599 HCompareObjectEqAndBranch* result =
10600 New<HCompareObjectEqAndBranch>(left, right);
10605 void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
10612 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
10613 ASSERT_EQ(2, call->arguments()->length());
10616 HValue* right =
Pop();
10617 HValue* left =
Pop();
10618 HInstruction* result = NewUncasted<HStringAdd>(left, right);
10624 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
10625 ASSERT_EQ(3, call->arguments()->length());
10628 HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
10634 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
10635 ASSERT_EQ(2, call->arguments()->length());
10638 HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
10644 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
10645 ASSERT_EQ(4, call->arguments()->length());
10648 HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
10653 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
10654 ASSERT_EQ(1, call->arguments()->length());
10656 HValue* value =
Pop();
10657 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
10662 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
10663 ASSERT_EQ(1, call->arguments()->length());
10665 HValue* value =
Pop();
10666 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
  ASSERT_EQ(2, call->arguments()->length());
  HValue* lo = Pop();
  HValue* hi = Pop();
  HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
10683 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
10684 ASSERT_EQ(3, call->arguments()->length());
10688 HValue* input =
Pop();
10689 HValue* index =
Pop();
10690 HValue* length =
Pop();
10697 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
10698 return Bailout(kInlinedRuntimeFunctionGetFromCache);
10703 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
10704 ASSERT_EQ(1, call->arguments()->length());
10706 HValue* number =
Pop();
10713 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
10715 int arg_count = call->arguments()->length() - 1;
10720 HValue*
function =
Pop();
10724 IfBuilder if_is_jsfunction(
this);
10725 if_is_jsfunction.If<HHasInstanceTypeAndBranch>(
function,
JS_FUNCTION_TYPE);
10727 if_is_jsfunction.Then();
10729 HInstruction* invoke_result =
10730 Add<HInvokeFunction>(
function, arg_count);
10732 Push(invoke_result);
10737 if_is_jsfunction.Else();
10739 HInstruction* call_result =
10740 Add<HCallFunction>(
function, arg_count);
10746 if_is_jsfunction.End();
10759 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
10760 ASSERT_EQ(2, call->arguments()->length());
10763 HValue* right =
Pop();
10764 HValue* left =
Pop();
10765 HInstruction* result = NewUncasted<HPower>(left, right);
10770 void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
10771 ASSERT(call->arguments()->length() == 1);
10773 HValue* value =
Pop();
10774 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
10779 void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
10780 ASSERT(call->arguments()->length() == 1);
10782 HValue* value =
Pop();
10783 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
10788 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
10789 ASSERT(call->arguments()->length() == 1);
10791 HValue* value =
Pop();
10792 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
10797 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
10798 return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
10803 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
10804 return Bailout(kInlinedRuntimeFunctionGeneratorNext);
10808 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
10809 return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
10813 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
10814 CallRuntime* call) {
10815 Add<HDebugBreak>();
10820 #undef CHECK_BAILOUT
10824 HEnvironment::HEnvironment(HEnvironment* outer,
10826 Handle<JSFunction> closure,
10828 : closure_(closure),
10831 parameter_count_(0),
10832 specials_count_(1),
10838 ast_id_(BailoutId::
None()),
10840 Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
10844 HEnvironment::HEnvironment(Zone* zone,
int parameter_count)
10845 : values_(0, zone),
10847 parameter_count_(parameter_count),
10848 specials_count_(1),
10854 ast_id_(BailoutId::
None()),
10856 Initialize(parameter_count, 0, 0);
10860 HEnvironment::HEnvironment(
const HEnvironment* other, Zone* zone)
10861 : values_(0, zone),
10863 parameter_count_(0),
10864 specials_count_(0),
10870 ast_id_(other->ast_id()),
10876 HEnvironment::HEnvironment(HEnvironment* outer,
10877 Handle<JSFunction> closure,
10881 : closure_(closure),
10882 values_(arguments, zone),
10883 frame_type_(frame_type),
10884 parameter_count_(arguments),
10885 specials_count_(0),
10891 ast_id_(BailoutId::
None()),
10896 void HEnvironment::Initialize(
int parameter_count,
10898 int stack_height) {
10899 parameter_count_ = parameter_count;
10900 local_count_ = local_count;
10903 int total = parameter_count + specials_count_ + local_count + stack_height;
10904 values_.Initialize(total + 4, zone());
10905 for (
int i = 0; i < total; ++i) values_.Add(NULL, zone());
10909 void HEnvironment::Initialize(
const HEnvironment* other) {
10910 closure_ = other->closure();
10911 values_.AddAll(other->values_, zone());
10912 assigned_variables_.Union(other->assigned_variables_, zone());
10913 frame_type_ = other->frame_type_;
10914 parameter_count_ = other->parameter_count_;
10915 local_count_ = other->local_count_;
10916 if (other->outer_ != NULL) outer_ = other->outer_->Copy();
10917 entry_ = other->entry_;
10918 pop_count_ = other->pop_count_;
10919 push_count_ = other->push_count_;
10920 specials_count_ = other->specials_count_;
10921 ast_id_ = other->ast_id_;
10925 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
10926 ASSERT(!block->IsLoopHeader());
10927 ASSERT(values_.length() == other->values_.length());
10929 int length = values_.length();
10930 for (
int i = 0; i < length; ++i) {
10931 HValue* value = values_[i];
10932 if (value != NULL && value->IsPhi() && value->block() == block) {
10934 HPhi* phi = HPhi::cast(value);
10936 ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
10937 ASSERT(phi->OperandCount() == block->predecessors()->length());
10938 phi->AddInput(other->values_[i]);
10939 }
else if (values_[i] != other->values_[i]) {
10941 ASSERT(values_[i] != NULL && other->values_[i] != NULL);
10942 HPhi* phi = block->AddNewPhi(i);
10943 HValue* old_value = values_[i];
10944 for (
int j = 0; j < block->predecessors()->length(); j++) {
10945 phi->AddInput(old_value);
10947 phi->AddInput(other->values_[i]);
10948 this->values_[i] = phi;
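// AddIncomingEdge: at a join point, a slot that already holds a phi owned by
// this block just receives one more input; a slot whose values differ between
// the two environments gets a fresh phi seeded with the old value once per
// existing predecessor plus the incoming value. Slots that agree need no phi.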
10954 void HEnvironment::Bind(
int index, HValue* value) {
10956 assigned_variables_.Add(index, zone());
10957 values_[index] = value;
10961 bool HEnvironment::HasExpressionAt(
int index)
const {
10962 return index >= parameter_count_ + specials_count_ + local_count_;
10966 bool HEnvironment::ExpressionStackIsEmpty()
const {
10967 ASSERT(length() >= first_expression_index());
10968 return length() == first_expression_index();
10972 void HEnvironment::SetExpressionStackAt(
int index_from_top, HValue* value) {
10973 int count = index_from_top + 1;
10974 int index = values_.length() - count;
10975 ASSERT(HasExpressionAt(index));
10978 if (push_count_ < count) {
10980 pop_count_ += (count - push_count_);
10981 push_count_ = count;
10983 values_[index] = value;
10987 void HEnvironment::Drop(
int count) {
10988 for (
int i = 0; i < count; ++i) {
10994 HEnvironment* HEnvironment::Copy()
const {
10995 return new(zone()) HEnvironment(
this, zone());
10999 HEnvironment* HEnvironment::CopyWithoutHistory()
const {
11000 HEnvironment* result = Copy();
11001 result->ClearHistory();
11006 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header)
const {
11007 HEnvironment* new_env = Copy();
11008 for (
int i = 0; i < values_.length(); ++i) {
11009 HPhi* phi = loop_header->AddNewPhi(i);
11010 phi->AddInput(values_[i]);
11011 new_env->values_[i] = phi;
11013 new_env->ClearHistory();
11018 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
11019 Handle<JSFunction> target,
11021 int arguments)
const {
11022 HEnvironment* new_env =
11023 new(zone()) HEnvironment(outer, target, frame_type,
11024 arguments + 1, zone());
11025 for (
int i = 0; i <= arguments; ++i) {
11026 new_env->Push(ExpressionStackAt(arguments - i));
11028 new_env->ClearHistory();
11033 HEnvironment* HEnvironment::CopyForInlining(
11034 Handle<JSFunction> target,
11036 FunctionLiteral*
function,
11037 HConstant* undefined,
11042 int arity =
function->scope()->num_parameters();
11044 HEnvironment* outer = Copy();
11045 outer->Drop(arguments + 1);
11046 outer->ClearHistory();
11052 outer = CreateStubEnvironment(outer, target,
JS_CONSTRUCT, arguments);
11056 outer = CreateStubEnvironment(outer, target,
JS_GETTER, arguments);
11060 outer = CreateStubEnvironment(outer, target,
JS_SETTER, arguments);
11063 if (arity != arguments) {
11068 HEnvironment* inner =
11069 new(zone()) HEnvironment(outer, function->scope(), target, zone());
11071 for (
int i = 0; i <= arity; ++i) {
11072 HValue* push = (i <= arguments) ?
11073 ExpressionStackAt(arguments - i) : undefined;
11074 inner->SetValueAt(i, push);
11076 inner->SetValueAt(arity + 1, context());
11077 for (
int i = arity + 2; i < inner->length(); ++i) {
11078 inner->SetValueAt(i, undefined);
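// CopyForInlining builds the callee environment: the caller's environment is
// copied, the receiver and arguments are dropped, stub frames are inserted for
// construct/getter/setter calls or arity mismatches, and the inner environment
// is seeded with receiver plus parameters (padded with undefined when fewer
// arguments were passed), the context in slot arity + 1, and undefined for the
// remaining locals.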
11086 void HEnvironment::PrintTo(StringStream* stream) {
11087 for (
int i = 0; i < length(); i++) {
11088 if (i == 0) stream->Add(
"parameters\n");
11089 if (i == parameter_count()) stream->Add(
"specials\n");
11090 if (i == parameter_count() + specials_count()) stream->Add(
"locals\n");
11091 if (i == parameter_count() + specials_count() + local_count()) {
11092 stream->Add(
"expressions\n");
11094 HValue* val = values_.at(i);
11095 stream->Add(
"%d: ", i);
11097 val->PrintNameTo(stream);
11099 stream->Add(
"NULL");
11107 void HEnvironment::PrintToStd() {
11108 HeapStringAllocator string_allocator;
11109 StringStream trace(&string_allocator);
11111 PrintF(
"%s", trace.ToCString().get());
11115 void HTracer::TraceCompilation(CompilationInfo* info) {
11116 Tag tag(
this,
"compilation");
11117 if (info->IsOptimizing()) {
11118 Handle<String> name = info->function()->debug_name();
11119 PrintStringProperty(
"name", name->ToCString().get());
11121 trace_.Add(
"method \"%s:%d\"\n",
11122 name->ToCString().get(),
11123 info->optimization_id());
11125 CodeStub::Major major_key = info->code_stub()->MajorKey();
11126 PrintStringProperty(
"name", CodeStub::MajorName(major_key,
false));
11127 PrintStringProperty(
"method",
"stub");
11133 void HTracer::TraceLithium(
const char* name, LChunk* chunk) {
11134 ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
11137 Trace(name, chunk->graph(), chunk);
11141 void HTracer::TraceHydrogen(
const char* name, HGraph* graph) {
11142 ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
11145 Trace(name, graph, NULL);
11149 void HTracer::Trace(
const char* name, HGraph* graph, LChunk* chunk) {
11150 Tag tag(
this,
"cfg");
11151 PrintStringProperty(
"name", name);
11152 const ZoneList<HBasicBlock*>* blocks = graph->blocks();
11153 for (
int i = 0; i < blocks->length(); i++) {
11154 HBasicBlock* current = blocks->at(i);
11155 Tag block_tag(
this,
"block");
11156 PrintBlockProperty(
"name", current->block_id());
11157 PrintIntProperty(
"from_bci", -1);
11158 PrintIntProperty(
"to_bci", -1);
11160 if (!current->predecessors()->is_empty()) {
11162 trace_.Add(
"predecessors");
11163 for (
int j = 0; j < current->predecessors()->length(); ++j) {
11164 trace_.Add(
" \"B%d\"", current->predecessors()->at(j)->block_id());
11168 PrintEmptyProperty(
"predecessors");
11171 if (current->end()->SuccessorCount() == 0) {
11172 PrintEmptyProperty(
"successors");
11175 trace_.Add(
"successors");
11176 for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
11177 trace_.Add(
" \"B%d\"", it.Current()->block_id());
11182 PrintEmptyProperty(
"xhandlers");
11186 trace_.Add(
"flags");
11187 if (current->IsLoopSuccessorDominator()) {
11188 trace_.Add(
" \"dom-loop-succ\"");
11190 if (current->IsUnreachable()) {
11191 trace_.Add(
" \"dead\"");
11193 if (current->is_osr_entry()) {
11194 trace_.Add(
" \"osr\"");
11199 if (current->dominator() !=
NULL) {
11200 PrintBlockProperty(
"dominator", current->dominator()->block_id());
11203 PrintIntProperty(
"loop_depth", current->LoopNestingDepth());
11205 if (chunk != NULL) {
11206 int first_index = current->first_instruction_index();
11207 int last_index = current->last_instruction_index();
11217 Tag states_tag(
this,
"states");
11218 Tag locals_tag(
this,
"locals");
11219 int total = current->phis()->length();
11220 PrintIntProperty(
"size", current->phis()->length());
11221 PrintStringProperty(
"method",
"None");
11222 for (
int j = 0; j < total; ++j) {
11223 HPhi* phi = current->phis()->at(j);
11225 trace_.Add(
"%d ", phi->merged_index());
11226 phi->PrintNameTo(&trace_);
11228 phi->PrintTo(&trace_);
11234 Tag HIR_tag(
this,
"HIR");
11235 for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
11236 HInstruction* instruction = it.Current();
11237 int uses = instruction->UseCount();
11239 trace_.Add(
"0 %d ", uses);
11240 instruction->PrintNameTo(&trace_);
11242 instruction->PrintTo(&trace_);
11243 if (FLAG_hydrogen_track_positions &&
11244 instruction->has_position() &&
11245 instruction->position().raw() != 0) {
11246 const HSourcePosition pos = instruction->position();
11247 trace_.Add(
" pos:");
11248 if (pos.inlining_id() != 0) {
11249 trace_.Add(
"%d_", pos.inlining_id());
11251 trace_.Add(
"%d", pos.position());
11253 trace_.Add(
" <|@\n");
11258 if (chunk != NULL) {
11259 Tag LIR_tag(
this,
"LIR");
11260 int first_index = current->first_instruction_index();
11261 int last_index = current->last_instruction_index();
11262 if (first_index != -1 && last_index != -1) {
11263 const ZoneList<LInstruction*>* instructions = chunk->instructions();
11264 for (
int i = first_index; i <= last_index; ++i) {
11265 LInstruction* linstr = instructions->at(i);
11266 if (linstr != NULL) {
11270 linstr->PrintTo(&trace_);
11271 trace_.Add(
" [hir:");
11272 linstr->hydrogen_value()->PrintNameTo(&trace_);
11274 trace_.Add(
" <|@\n");
11283 void HTracer::TraceLiveRanges(
const char* name, LAllocator* allocator) {
11284 Tag tag(
this,
"intervals");
11285 PrintStringProperty(
"name", name);
11287 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
11288 for (
int i = 0; i < fixed_d->length(); ++i) {
11289 TraceLiveRange(fixed_d->at(i),
"fixed", allocator->zone());
11292 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
11293 for (
int i = 0; i < fixed->length(); ++i) {
11294 TraceLiveRange(fixed->at(i),
"fixed", allocator->zone());
11297 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
11298 for (
int i = 0; i < live_ranges->length(); ++i) {
11299 TraceLiveRange(live_ranges->at(i),
"object", allocator->zone());
11304 void HTracer::TraceLiveRange(LiveRange* range,
const char* type,
11306 if (range != NULL && !range->IsEmpty()) {
11308 trace_.Add(
"%d %s", range->id(), type);
11309 if (range->HasRegisterAssigned()) {
11310 LOperand* op = range->CreateAssignedOperand(zone);
11311 int assigned_reg = op->index();
11312 if (op->IsDoubleRegister()) {
11313 trace_.Add(
" \"%s\"",
11316 ASSERT(op->IsRegister());
11319 }
else if (range->IsSpilled()) {
11320 LOperand* op = range->TopLevel()->GetSpillOperand();
11321 if (op->IsDoubleStackSlot()) {
11322 trace_.Add(
" \"double_stack:%d\"", op->index());
11324 ASSERT(op->IsStackSlot());
11325 trace_.Add(
" \"stack:%d\"", op->index());
11328 int parent_index = -1;
11329 if (range->IsChild()) {
11330 parent_index = range->parent()->id();
11332 parent_index = range->id();
11334 LOperand* op = range->FirstHint();
11335 int hint_index = -1;
11336 if (op != NULL && op->IsUnallocated()) {
11339 trace_.Add(
" %d %d", parent_index, hint_index);
11340 UseInterval* cur_interval = range->first_interval();
11341 while (cur_interval != NULL && range->Covers(cur_interval->start())) {
11342 trace_.Add(
" [%d, %d[",
11343 cur_interval->start().Value(),
11344 cur_interval->end().Value());
11345 cur_interval = cur_interval->next();
11348 UsePosition* current_pos = range->first_pos();
11349 while (current_pos != NULL) {
11350 if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
11351 trace_.Add(
" %d M", current_pos->pos().Value());
11353 current_pos = current_pos->next();
11356 trace_.Add(
" \"\"\n");
11361 void HTracer::FlushToFile() {
11362 AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
11368 void HStatistics::Initialize(CompilationInfo* info) {
11369 if (info->shared_info().is_null())
return;
11370 source_size_ += info->shared_info()->SourceSize();
void HStatistics::Print() {
  PrintF("Timing results:\n");
  TimeDelta sum;
  for (int i = 0; i < times_.length(); ++i) {
    sum += times_[i];
  }

  for (int i = 0; i < names_.length(); ++i) {
    PrintF("%32s", names_[i]);
    double ms = times_[i].InMillisecondsF();
    double percent = times_[i].PercentOf(sum);
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    unsigned size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
  }

  PrintF("----------------------------------------"
         "---------------------------------------\n");
  TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Create graph",
         create_graph_.InMillisecondsF(),
         create_graph_.PercentOf(total));
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Optimize graph",
         optimize_graph_.InMillisecondsF(),
         optimize_graph_.PercentOf(total));
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Generate and install code",
         generate_code_.InMillisecondsF(),
         generate_code_.PercentOf(total));
  PrintF("----------------------------------------"
         "---------------------------------------\n");
  PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
         "Total",
         total.InMillisecondsF(),
         total.TimesOf(full_code_gen_));

  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? total.InMillisecondsF() / source_size_in_kb
      : 0;
  double normalized_size_in_kb = source_size_in_kb > 0
      ? total_size_ / 1024 / source_size_in_kb
      : 0;
  PrintF("%32s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source",
         normalized_time, normalized_size_in_kb);
}

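// Records the time spent and code size produced for a named phase, adding to
// the existing entry when the phase name has been seen before.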
void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
  total_size_ += size;
  for (int i = 0; i < names_.length(); ++i) {
    if (strcmp(names_[i], name) == 0) {
      times_[i] += time;
      sizes_[i] += size;
      return;
    }
  }
  names_.Add(name);
  times_.Add(time);
  sizes_.Add(size);
}

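// At the end of a phase, dump the graph to the hydrogen tracer when trace
// output is requested and, in debug builds, verify graph invariants.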
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}