#define DEFINE_COMPILE(type)                            \
  void L##type::CompileToNative(LCodeGen* generator) {  \
    generator->Do##type(this);                          \
  }


void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Input operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
  }
}

void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());
  PrintOutputOperandTo(stream);
  PrintDataTo(stream);
}


void LInstruction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  for (int i = 0; i < InputCount(); i++) {
    if (i > 0) stream->Add(" ");
    if (InputAt(i) == NULL) {
      stream->Add("NULL");
    } else {
      InputAt(i)->PrintTo(stream);
    }
  }
}


void LLabel::PrintDataTo(StringStream* stream) {
  LGap::PrintDataTo(stream);
  LLabel* rep = replacement();
  if (rep != NULL) {
    stream->Add(" Dead block replaced with B%d", rep->block_id());
  }
}

bool LGap::IsRedundant() const {
  for (int i = 0; i < 4; i++) {
    if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }
  return true;
}


void LGap::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < 4; i++) {
    stream->Add("(");
    if (parallel_moves_[i] != NULL) {
      parallel_moves_[i]->PrintDataTo(stream);
    }
    stream->Add(") ");
  }
}


const char* LArithmeticD::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-d";
    case Token::SUB: return "sub-d";
    case Token::MUL: return "mul-d";
    case Token::DIV: return "div-d";
    case Token::MOD: return "mod-d";
    default:
      UNREACHABLE();
      return NULL;
  }
}

const char* LArithmeticT::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-t";
    case Token::SUB: return "sub-t";
    case Token::MUL: return "mul-t";
    case Token::DIV: return "div-t";
    case Token::MOD: return "mod-t";
    case Token::BIT_AND: return "bit-and-t";
    case Token::BIT_OR: return "bit-or-t";
    case Token::BIT_XOR: return "bit-xor-t";
    case Token::ROR: return "ror-t";
    case Token::SHL: return "sll-t";
    case Token::SAR: return "sra-t";
    case Token::SHR: return "srl-t";
    default:
      UNREACHABLE();
      return NULL;
  }
}

bool LGoto::HasInterestingComment(LCodeGen* gen) const {
  return !gen->IsNextEmittedBlock(block_id());
}


void LGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", block_id());
}


void LBranch::PrintDataTo(StringStream* stream) {
  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
  value()->PrintTo(stream);
}


LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
  return new(zone()) LDebugBreak();
}

void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  left()->PrintTo(stream);
  stream->Add(" %s ", Token::String(op()));
  right()->PrintTo(stream);
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}

void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  left()->PrintTo(stream);
  right()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}

void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  value()->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(), false_block_id());
}


void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if typeof ");
  value()->PrintTo(stream);
  stream->Add(" == \"%s\" then B%d else B%d",
              hydrogen()->type_literal()->ToCString().get(),
              true_block_id(), false_block_id());
}

void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
  stream->Add(" = ");
  function()->PrintTo(stream);
  stream->Add(".code_entry = ");
  code_object()->PrintTo(stream);
}


void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  stream->Add(" = ");
  base_object()->PrintTo(stream);
  stream->Add(" + ");
  offset()->PrintTo(stream);
}


void LCallJSFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  function()->PrintTo(stream);
  stream->Add("#%d / ", arity());
}


void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < InputCount(); i++) {
    InputAt(i)->PrintTo(stream);
    stream->Add(" ");
  }
  stream->Add("#%d / ", arity());
}

void LLoadContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d]", slot_index());
}


void LStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d] <- ", slot_index());
  value()->PrintTo(stream);
}


void LInvokeFunction::PrintDataTo(StringStream* stream) {
  function()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


void LCallNew::PrintDataTo(StringStream* stream) {
  constructor()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


void LCallNewArray::PrintDataTo(StringStream* stream) {
  constructor()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);
  stream->Add(" length ");
  length()->PrintTo(stream);
  stream->Add(" index ");
  index()->PrintTo(stream);
}

void LStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  hydrogen()->access().PrintTo(stream);
  stream->Add(" <- ");
  value()->PrintTo(stream);
}


void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(String::cast(*name())->ToCString().get());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}

void LLoadKeyed::PrintDataTo(StringStream* stream) {
  elements()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  if (hydrogen()->IsDehoisted()) {
    stream->Add(" + %d]", additional_index());
  } else {
    stream->Add("]");
  }
}


void LStoreKeyed::PrintDataTo(StringStream* stream) {
  elements()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  if (hydrogen()->IsDehoisted()) {
    stream->Add(" + %d] <-", additional_index());
  } else {
    stream->Add("] <- ");
  }

  if (value() == NULL) {
    ASSERT(hydrogen()->IsConstantHoleStore() &&
           hydrogen()->value()->representation().IsDouble());
    stream->Add("<the hole(nan)>");
  } else {
    value()->PrintTo(stream);
  }
}

void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
}

int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
  // Skip a slot if for a double-width slot.
  if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
  return spill_slot_count_++;
}


LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
  int index = GetNextSpillIndex(kind);
  if (kind == DOUBLE_REGISTERS) {
    return LDoubleStackSlot::Create(index, zone());
  } else {
    ASSERT(kind == GENERAL_REGISTERS);
    return LStackSlot::Create(index, zone());
  }
}


LPlatformChunk* LChunkBuilder::Build() {
  ASSERT(is_unused());
  chunk_ = new(zone()) LPlatformChunk(info(), graph());
  LPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;

  // If compiling for OSR, reserve space for the unoptimized frame,
  // which will be subsumed into this frame.
  if (graph()->has_osr()) {
    for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
      chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
    }
  }

  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* next = NULL;
    if (i < blocks->length() - 1) next = blocks->at(i + 1);
    DoBasicBlock(blocks->at(i), next);
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}


void LChunkBuilder::Abort(BailoutReason reason) {
  info()->set_bailout_reason(reason);
  status_ = ABORTED;
}

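// The Use* helpers below wrap an HValue in an LUnallocated operand that
// records the register-allocation policy for that use: a specific fixed
// (double) register, any register, a writable temp register, a constant
// operand, or any location at all. The "AtStart" variants mark the operand
// as live only at the start of the instruction, which lets the allocator
// reuse its register for the output.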
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg));
}


LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg));
}


LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}


LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
  return Use(value, ToUnallocated(reg));
}


LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
  return Use(value,
             new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
                                      LUnallocated::USED_AT_START));
}


LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}


LOperand* LChunkBuilder::Use(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}


LOperand* LChunkBuilder::UseAtStart(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
                                             LUnallocated::USED_AT_START));
}


LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value);
}


LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseAtStart(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegister(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegisterAtStart(value);
}


LOperand* LChunkBuilder::UseConstant(HValue* value) {
  return chunk_->DefineConstantOperand(HConstant::cast(value));
}


LOperand* LChunkBuilder::UseAny(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}


LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}

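// The Define* helpers attach a result operand to an instruction that
// produces a value: in any register, spilled to a given stack slot, in the
// same register as the first input, or in a specific fixed (double)
// register.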
LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}


LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateResultInstruction<1>* instr, int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}


LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}


LInstruction* LChunkBuilder::DefineFixed(
    LTemplateResultInstruction<1>* instr, Register reg) {
  return Define(instr, ToUnallocated(reg));
}


LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
  return Define(instr, ToUnallocated(reg));
}


LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator,
                                           &objects_to_materialize));
  return instr;
}

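// MarkAsCall records that an instruction clobbers registers according to the
// calling convention. Every call gets a pointer map, and it also gets an
// environment when it may deoptimize eagerly or has no observable side
// effects, so that lazy deoptimization can resume before the call.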
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
  info()->MarkAsNonDeferredCalling();
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  // If the instruction does not have side effects, lazy deoptimization
  // after the call will try to deoptimize to the point before the call,
  // so the call still needs an environment attached.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}


LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
  return instr;
}


LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::TEMPORARY);
  int vreg = allocator_->GetVirtualRegister();
  if (!allocator_->AllocationOk()) {
    Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
    vreg = 0;
  }
  operand->set_virtual_register(vreg);
  return operand;
}


LOperand* LChunkBuilder::FixedTemp(Register reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}


LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}

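// The Do* methods below translate individual hydrogen (H*) instructions into
// lithium (L*) instructions, choosing operand policies and attaching
// environments or pointer maps where deoptimization or GC can occur.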
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  return new(zone()) LLabel(instr->block());
}


LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
}


LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
  UNREACHABLE();
  return NULL;
}


LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}

LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->left());

    HValue* right_value = instr->right();
    LOperand* right = NULL;
    int constant_value = 0;
    bool does_deopt = false;
    if (right_value->IsConstant()) {
      HConstant* constant = HConstant::cast(right_value);
      right = chunk_->DefineConstantOperand(constant);
      constant_value = constant->Integer32Value() & 0x1f;
      // Left shifts can deoptimize if we shift by > 0 and the result cannot
      // be truncated to smi.
      if (instr->representation().IsSmi() && constant_value > 0) {
        does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
      }
    } else {
      right = UseRegisterAtStart(right_value);
    }

    // Shift operations can only deoptimize if we do a logical shift
    // by 0 and the result cannot be truncated to int32.
    if (op == Token::SHR && constant_value == 0) {
      if (FLAG_opt_safe_uint32_operations) {
        does_deopt = !instr->CheckFlag(HInstruction::kUint32);
      } else {
        does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
      }
    }

    LInstruction* result =
        DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
    return does_deopt ? AssignEnvironment(result) : result;
  } else {
    return DoArithmeticT(op, instr);
  }
}

LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->left()->representation().IsDouble());
  ASSERT(instr->right()->representation().IsDouble());
  if (op == Token::MOD) {
    LOperand* left = UseFixedDouble(instr->left(), f2);
    LOperand* right = UseFixedDouble(instr->right(), f4);
    LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
    return MarkAsCall(DefineFixedDouble(result, f2), instr);
  } else {
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
    return DefineAsRegister(result);
  }
}


LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HBinaryOperation* instr) {
  HValue* left = instr->left();
  HValue* right = instr->right();
  ASSERT(left->representation().IsTagged());
  ASSERT(right->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left_operand = UseFixed(left, a1);
  LOperand* right_operand = UseFixed(right, a0);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, context, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, v0), instr);
}

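// DoBasicBlock threads the simulated HEnvironment from a block's predecessor
// into the block (copying it only when it is still needed elsewhere) before
// lowering each instruction of the block in order.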
void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
  ASSERT(is_building());
  current_block_ = block;
  next_block_ = next_block;
  if (block->IsStartBlock()) {
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);
    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
          pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      if (phi->HasMergedIndex()) {
        last_environment->SetValueAt(phi->merged_index(), phi);
      }
    }
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      if (block->deleted_phis()->at(i) < last_environment->length()) {
        last_environment->SetValueAt(block->deleted_phis()->at(i),
                                     graph_->GetConstantUndefined());
      }
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while (current != NULL && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  next_block_ = NULL;
  current_block_ = NULL;
}

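// VisitInstruction lowers a single hydrogen instruction. Instructions that
// can be replaced with dummy uses are not compiled; calls additionally get an
// LLazyBailout so that lazy deoptimization has an environment to resume from.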
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;

  LInstruction* instr = NULL;
  if (current->CanReplaceWithDummyUses()) {
    if (current->OperandCount() == 0) {
      instr = DefineAsRegister(new(zone()) LDummy());
    } else {
      ASSERT(!current->OperandAt(0)->IsControlInstruction());
      instr = DefineAsRegister(new(zone())
          LDummyUse(UseAny(current->OperandAt(0))));
    }
    for (int i = 1; i < current->OperandCount(); ++i) {
      if (current->OperandAt(i)->IsControlInstruction()) continue;
      LInstruction* dummy =
          new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
      dummy->set_hydrogen_value(current);
      chunk_->AddInstruction(dummy, current_block_);
    }
  } else {
    instr = current->CompileToLithium(this);
  }

  argument_count_ += current->argument_delta();
  ASSERT(argument_count_ >= 0);

  if (instr != NULL) {
    instr->set_hydrogen_value(current);

#ifdef DEBUG
    // Make sure that the lithium instruction has either no fixed register
    // constraints in temps or the result OR no uses that are only used at
    // start. If this invariant does not hold, the register allocator can
    // decide to insert a split of a range immediately before the instruction
    // due to an already allocated register needing to be used for the
    // instruction's fixed register constraint.
    if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
      int fixed = 0;
      int used_at_start = 0;
      for (UseIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->IsUsedAtStart()) ++used_at_start;
      }
      if (instr->Output() != NULL) {
        if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
      }
      for (TempIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->HasFixedPolicy()) ++fixed;
      }
      ASSERT(fixed == 0 || used_at_start == 0);
    }
#endif

    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    chunk_->AddInstruction(instr, current_block_);

    if (instr->IsCall()) {
      HValue* hydrogen_value_for_lazy_bailout = current;
      LInstruction* instruction_needing_environment = NULL;
      if (current->HasObservableSideEffects()) {
        HSimulate* sim = HSimulate::cast(current->next());
        instruction_needing_environment = instr;
        sim->ReplayEnvironment(current_block_->last_environment());
        hydrogen_value_for_lazy_bailout = sim;
      }
      LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
      bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
      chunk_->AddInstruction(bailout, current_block_);
      if (instruction_needing_environment != NULL) {
        // Store the lazy deopt environment with the instruction if needed.
        instruction_needing_environment->
            SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
      }
    }
  }
  current_instruction_ = old_current;
}

LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
  return new(zone()) LGoto(instr->FirstSuccessor());
}


LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  HValue* value = instr->value();
  LBranch* result = new(zone()) LBranch(UseRegister(value));
  // Tagged values that are not known smis or booleans require a
  // deoptimizing environment unless the branch is generic.
  Representation rep = value->representation();
  HType type = value->type();
  ToBooleanStub::Types expected = instr->expected_input_types();
  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() &&
      !expected.IsGeneric()) {
    return AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  return new(zone()) LCmpMapAndBranch(value, temp);
}

LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
  info()->MarkAsRequiresFrame();
  return DefineAsRegister(
      new(zone()) LArgumentsLength(UseRegister(length->value())));
}


LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
  info()->MarkAsRequiresFrame();
  return DefineAsRegister(new(zone()) LArgumentsElements);
}


LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LInstanceOf* result =
      new(zone()) LInstanceOf(context, UseFixed(instr->left(), a0),
                              UseFixed(instr->right(), a1));
  return MarkAsCall(DefineFixed(result, v0), instr);
}

LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
    HInstanceOfKnownGlobal* instr) {
  LInstanceOfKnownGlobal* result =
      new(zone()) LInstanceOfKnownGlobal(
          UseFixed(instr->context(), cp),
          UseFixed(instr->left(), a0),
          FixedTemp(t0));
  return MarkAsCall(DefineFixed(result, v0), instr);
}

LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
  LOperand* receiver = UseRegisterAtStart(instr->receiver());
  LOperand* function = UseRegisterAtStart(instr->function());
  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
  return AssignEnvironment(DefineAsRegister(result));
}


LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
  LOperand* function = UseFixed(instr->function(), a1);
  LOperand* receiver = UseFixed(instr->receiver(), a0);
  LOperand* length = UseFixed(instr->length(), a2);
  LOperand* elements = UseFixed(instr->elements(), a3);
  LApplyArguments* result = new(zone()) LApplyArguments(function,
                                                        receiver,
                                                        length,
                                                        elements);
  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}


LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
  LOperand* argument = Use(instr->argument());
  return new(zone()) LPushArgument(argument);
}


LInstruction* LChunkBuilder::DoStoreCodeEntry(
    HStoreCodeEntry* store_code_entry) {
  LOperand* function = UseRegister(store_code_entry->function());
  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
  return new(zone()) LStoreCodeEntry(function, code_object);
}

LInstruction* LChunkBuilder::DoInnerAllocatedObject(
    HInnerAllocatedObject* instr) {
  LOperand* base_object = UseRegisterAtStart(instr->base_object());
  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
  return DefineAsRegister(
      new(zone()) LInnerAllocatedObject(base_object, offset));
}


LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
  return instr->HasNoUses()
      ? NULL
      : DefineAsRegister(new(zone()) LThisFunction);
}


LInstruction* LChunkBuilder::DoContext(HContext* instr) {
  if (instr->HasNoUses()) return NULL;

  if (info()->IsStub()) {
    return DefineFixed(new(zone()) LContext, cp);
  }

  return DefineAsRegister(new(zone()) LContext);
}


LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
}

LInstruction* LChunkBuilder::DoCallJSFunction(
    HCallJSFunction* instr) {
  LOperand* function = UseFixed(instr->function(), a1);

  LCallJSFunction* result = new(zone()) LCallJSFunction(function);

  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoCallWithDescriptor(
    HCallWithDescriptor* instr) {
  const CallInterfaceDescriptor* descriptor = instr->descriptor();

  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
  ops.Add(target, zone());
  for (int i = 1; i < instr->OperandCount(); i++) {
    LOperand* op = UseFixed(instr->OperandAt(i),
                            descriptor->GetParameterRegister(i - 1));
    ops.Add(op, zone());
  }

  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
      descriptor, ops, zone());
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* function = UseFixed(instr->function(), a1);
  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
  return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}

LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathFloor: return DoMathFloor(instr);
    case kMathRound: return DoMathRound(instr);
    case kMathAbs: return DoMathAbs(instr);
    case kMathLog: return DoMathLog(instr);
    case kMathExp: return DoMathExp(instr);
    case kMathSqrt: return DoMathSqrt(instr);
    case kMathPowHalf: return DoMathPowHalf(instr);
    case kMathClz32: return DoMathClz32(instr);
    default:
      UNREACHABLE();
      return NULL;
  }
}

LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->value()->representation().IsDouble());
  LOperand* input = UseFixedDouble(instr->value(), f4);
  return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), f4), instr);
}


LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  LMathClz32* result = new(zone()) LMathClz32(input);
  return DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->value()->representation().IsDouble());
  LOperand* input = UseRegister(instr->value());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LOperand* double_temp = FixedTemp(f6);
  LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
  return DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
  LOperand* input = UseFixedDouble(instr->value(), f8);
  LOperand* temp = FixedTemp(f6);
  LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
  return DefineFixedDouble(result, f4);
}

LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
  Representation r = instr->value()->representation();
  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
      ? NULL
      : UseFixed(instr->context(), cp);
  LOperand* input = UseRegister(instr->value());
  LMathAbs* result = new(zone()) LMathAbs(context, input);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}


LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LOperand* temp = TempRegister();
  LMathFloor* result = new(zone()) LMathFloor(input, temp);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}


LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LMathSqrt* result = new(zone()) LMathSqrt(input);
  return DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
  LOperand* input = UseRegister(instr->value());
  LOperand* temp = FixedTemp(f6);
  LMathRound* result = new(zone()) LMathRound(input, temp);
  return AssignEnvironment(DefineAsRegister(result));
}

LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* constructor = UseFixed(instr->constructor(), a1);
  LCallNew* result = new(zone()) LCallNew(context, constructor);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* constructor = UseFixed(instr->constructor(), a1);
  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* function = UseFixed(instr->function(), a1);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(call, v0), instr);
}


LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), v0), instr);
}

LInstruction* LChunkBuilder::DoRor(HRor* instr) {
  return DoShift(Token::ROR, instr);
}


LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}


LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}


LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}


LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));

    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    return DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    return DoArithmeticT(instr->op(), instr);
  }
}

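// Integer division and modulus are lowered to specialized instructions when
// the divisor is a constant (power of two or otherwise); the generic
// register-register forms attach an environment when they can deoptimize,
// e.g. on division by zero, overflow, or a -0 result.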
LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
          dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor == 0) ||
      (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
      (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
       divisor != 1 && divisor != -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
          dividend, divisor));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
    result = AssignEnvironment(result);
  }
  return result;
}

LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LDivI* div = new(zone()) LDivI(dividend, divisor);
  return AssignEnvironment(DefineAsRegister(div));
}


LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    if (instr->RightIsPowerOf2()) {
      return DoDivByPowerOf2I(instr);
    } else if (instr->right()->IsConstant()) {
      return DoDivByConstI(instr);
    } else {
      return DoDivI(instr);
    }
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else {
    return DoArithmeticT(Token::DIV, instr);
  }
}

LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
          dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor == 0) ||
      (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LOperand* temp =
      ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
       (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
      NULL : TempRegister();
  LInstruction* result = DefineAsRegister(
      new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
    result = AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  if (instr->RightIsPowerOf2()) {
    return DoFlooringDivByPowerOf2I(instr);
  } else if (instr->right()->IsConstant()) {
    return DoFlooringDivByConstI(instr);
  } else {
    return DoDivI(instr);
  }
}

LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
          dividend, divisor));
  if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
      instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
          dividend, divisor));
  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}

LInstruction* LChunkBuilder::DoModI(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LInstruction* result = DefineAsRegister(new(zone()) LModI(
          dividend, divisor));
  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
      instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}


LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    return instr->RightIsPowerOf2() ? DoModByPowerOf2I(instr) : DoModI(instr);
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MOD, instr);
  } else {
    return DoArithmeticT(Token::MOD, instr);
  }
}

LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    HValue* left = instr->BetterLeftOperand();
    HValue* right = instr->BetterRightOperand();
    LOperand* left_op;
    LOperand* right_op;
    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
    bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);

    if (right->IsConstant()) {
      HConstant* constant = HConstant::cast(right);
      int32_t constant_value = constant->Integer32Value();
      // Constants -1, 0 and 1 can be optimized if the result can overflow.
      // For other constants, it can be optimized only without overflow.
      if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
        left_op = UseRegisterAtStart(left);
        right_op = UseConstant(right);
      } else {
        if (bailout_on_minus_zero) {
          left_op = UseRegister(left);
        } else {
          left_op = UseRegisterAtStart(left);
        }
        right_op = UseRegister(right);
      }
    } else {
      if (bailout_on_minus_zero) {
        left_op = UseRegister(left);
      } else {
        left_op = UseRegisterAtStart(left);
      }
      right_op = UseRegister(right);
    }
    LMulI* mul = new(zone()) LMulI(left_op, right_op);
    if (can_overflow || bailout_on_minus_zero) {
      AssignEnvironment(mul);
    }
    return DefineAsRegister(mul);

  } else if (instr->representation().IsDouble()) {
    if (instr->UseCount() == 1 && instr->uses().value()->IsAdd()) {
      HAdd* add = HAdd::cast(instr->uses().value());
      if (instr == add->left()) {
        // This mul is the lhs of an add. The add and mul will be folded
        // into a multiply-add.
        return NULL;
      }
      if (instr == add->right() && !add->left()->IsMul()) {
        // This mul is the rhs of an add, where the lhs is not another mul.
        // The add and mul will be folded into a multiply-add.
        return NULL;
      }
    }
    return DoArithmeticD(Token::MUL, instr);
  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}

LInstruction* LChunkBuilder::DoSub(HSub* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LSubI* sub = new(zone()) LSubI(left, right);
    LInstruction* result = DefineAsRegister(sub);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::SUB, instr);
  } else {
    return DoArithmeticT(Token::SUB, instr);
  }
}


LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
  LOperand* addend_op = UseRegisterAtStart(addend);
  return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
                                                     multiplicand_op));
}

LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsExternal()) {
    ASSERT(instr->left()->representation().IsExternal());
    ASSERT(instr->right()->representation().IsInteger32());
    ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    return result;
  } else if (instr->representation().IsDouble()) {
    if (instr->left()->IsMul())
      return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());

    if (instr->right()->IsMul()) {
      ASSERT(!instr->left()->IsMul());
      return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
    }
    return DoArithmeticD(Token::ADD, instr);
  } else {
    return DoArithmeticT(Token::ADD, instr);
  }
}

LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
  LOperand* left = NULL;
  LOperand* right = NULL;
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    left = UseRegisterAtStart(instr->BetterLeftOperand());
    right = UseOrConstantAtStart(instr->BetterRightOperand());
  } else {
    ASSERT(instr->representation().IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    left = UseRegisterAtStart(instr->left());
    right = UseRegisterAtStart(instr->right());
  }
  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
}


LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), f2);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), f4) :
      UseFixed(instr->right(), a2);
  LPower* result = new(zone()) LPower(left, right);
  return MarkAsCall(DefineFixedDouble(result, f0),
                    instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}

LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), a1);
  LOperand* right = UseFixed(instr->right(), a0);
  LCmpT* result = new(zone()) LCmpT(context, left, right);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
    HCompareNumericAndBranch* instr) {
  Representation r = instr->representation();
  if (r.IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(r));
    ASSERT(instr->right()->representation().Equals(r));
    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return new(zone()) LCompareNumericAndBranch(left, right);
  } else {
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    return new(zone()) LCompareNumericAndBranch(left, right);
  }
}

LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
    HCompareObjectEqAndBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  return new(zone()) LCmpObjectEqAndBranch(left, right);
}


LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
    HCompareHoleAndBranch* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LCmpHoleAndBranch(value);
}


LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
    HCompareMinusZeroAndBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;
  LOperand* value = UseRegister(instr->value());
  LOperand* scratch = TempRegister();
  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
}


LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
                                        temp);
}


LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
                                        temp);
}


LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}


LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
    HIsUndetectableAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsUndetectableAndBranch(
      UseRegisterAtStart(instr->value()), TempRegister());
}

LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), a1);
  LOperand* right = UseFixed(instr->right(), a0);
  LStringCompareAndBranch* result =
      new(zone()) LStringCompareAndBranch(context, left, right);
  return MarkAsCall(result, instr);
}


LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LHasInstanceTypeAndBranch(value);
}


LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
    HGetCachedArrayIndex* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());

  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}


LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
    HHasCachedArrayIndexAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LHasCachedArrayIndexAndBranch(
      UseRegisterAtStart(instr->value()));
}


LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
                                           TempRegister());
}

LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
  LOperand* map = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LMapEnumLength(map));
}


LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
  LOperand* object = UseFixed(instr->value(), a0);
  LDateField* result =
      new(zone()) LDateField(object, FixedTemp(a1), instr->index());
  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}


LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
  LOperand* string = UseRegisterAtStart(instr->string());
  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
}


LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
  LOperand* string = UseRegisterAtStart(instr->string());
  LOperand* index = FLAG_debug_code
      ? UseRegisterAtStart(instr->index())
      : UseRegisterOrConstantAtStart(instr->index());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
  return new(zone()) LSeqStringSetChar(context, string, index, value);
}

LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
  LOperand* length = UseRegister(instr->length());
  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}


LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
    HBoundsCheckBaseIndexInformation* instr) {
  UNREACHABLE();
  return NULL;
}


LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  return NULL;
}


LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}


LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}

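// DoChange lowers representation changes between smi, tagged, int32 and
// double values. Conversions that can fail (for example tagged-to-int32 on a
// non-number) get an environment; conversions that may allocate a heap
// number get a pointer map and mark the function as containing deferred
// calls.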
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsSmi()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(instr->value());
      return DefineSameAsFirst(new(zone()) LDummyUse(value));
    }
    from = Representation::Tagged();
  }
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      LOperand* value = UseRegister(instr->value());
      LNumberUntagD* res = new(zone()) LNumberUntagD(value);
      return AssignEnvironment(DefineAsRegister(res));
    } else if (to.IsSmi()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      if (val->type().IsSmi()) {
        return DefineSameAsFirst(new(zone()) LDummyUse(value));
      }
      return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = NULL;
      LInstruction* res = NULL;
      HValue* val = instr->value();
      if (val->type().IsSmi() || val->representation().IsSmi()) {
        value = UseRegisterAtStart(val);
        res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
      } else {
        value = UseRegister(val);
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = FixedTemp(f22);
        res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
                                                       temp1,
                                                       temp2));
        res = AssignEnvironment(res);
      }
      return res;
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      info()->MarkAsDeferredCalling();
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();

      // Make sure that the temp and result_temp registers are
      // different.
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
      Define(result, result_temp);
      return AssignPointerMap(result);
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(instr->value());
      return AssignEnvironment(
          DefineAsRegister(new(zone()) LDoubleToSmi(value)));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      LDoubleToI* res = new(zone()) LDoubleToI(value);
      return AssignEnvironment(DefineAsRegister(res));
    }
  } else if (from.IsInteger32()) {
    info()->MarkAsDeferredCalling();
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegisterAtStart(val);
      if (!instr->CheckFlag(HValue::kCanOverflow)) {
        return DefineAsRegister(new(zone()) LSmiTag(value));
      } else if (val->CheckFlag(HInstruction::kUint32)) {
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      } else {
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      }
    } else if (to.IsSmi()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
      if (instr->CheckFlag(HValue::kCanOverflow)) {
        result = AssignEnvironment(result);
      }
      return result;
    } else {
      ASSERT(to.IsDouble());
      if (instr->value()->CheckFlag(HInstruction::kUint32)) {
        return DefineAsRegister(
            new(zone()) LUint32ToDouble(UseRegister(instr->value())));
      } else {
        return DefineAsRegister(
            new(zone()) LInteger32ToDouble(Use(instr->value())));
      }
    }
  }
  UNREACHABLE();
  return NULL;
}

LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}


LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckSmi(value));
}


LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LInstruction* result = new(zone()) LCheckInstanceType(value);
  return AssignEnvironment(result);
}


LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckValue(value));
}


LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  LOperand* value = NULL;
  if (!instr->CanOmitMapChecks()) {
    value = UseRegisterAtStart(instr->value());
    if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
  }
  LCheckMaps* result = new(zone()) LCheckMaps(value);
  if (!instr->CanOmitMapChecks()) {
    AssignEnvironment(result);
    if (instr->has_migration_target()) return AssignPointerMap(result);
  }
  return result;
}

LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new(zone()) LClampIToUint8(reg));
  } else {
    ASSERT(input_rep.IsSmiOrTagged());
    // Register allocator doesn't (yet) support allocation of double
    // temps. Reserve f22 as temp double register.
    LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
    return AssignEnvironment(DefineAsRegister(result));
  }
}


LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
  HValue* value = instr->value();
  ASSERT(value->representation().IsDouble());
  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
}


LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
  LOperand* lo = UseRegister(instr->lo());
  LOperand* hi = UseRegister(instr->hi());
  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
}


LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
  LOperand* context = info()->IsStub()
      ? UseFixed(instr->context(), cp)
      : NULL;
  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
  return new(zone()) LReturn(UseFixed(instr->value(), v0), context,
                             parameter_count);
}

LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsSmi()) {
    return DefineAsRegister(new(zone()) LConstantS);
  } else if (r.IsInteger32()) {
    return DefineAsRegister(new(zone()) LConstantI);
  } else if (r.IsDouble()) {
    return DefineAsRegister(new(zone()) LConstantD);
  } else if (r.IsExternal()) {
    return DefineAsRegister(new(zone()) LConstantE);
  } else if (r.IsTagged()) {
    return DefineAsRegister(new(zone()) LConstantT);
  } else {
    UNREACHABLE();
    return NULL;
  }
}

LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
  return instr->RequiresHoleCheck()
      ? AssignEnvironment(DefineAsRegister(result))
      : DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* global_object = UseFixed(instr->global_object(), a0);
  LLoadGlobalGeneric* result =
      new(zone()) LLoadGlobalGeneric(context, global_object);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
  LOperand* value = UseRegister(instr->value());
  // Use a temp to check the value in the cell in the case where we perform
  // a hole check.
  return instr->RequiresHoleCheck()
      ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
      : new(zone()) LStoreGlobalCell(value, NULL);
}


LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  LInstruction* result =
      DefineAsRegister(new(zone()) LLoadContextSlot(context));
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}


LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* context;
  LOperand* value;
  if (instr->NeedsWriteBarrier()) {
    context = UseTempRegister(instr->context());
    value = UseTempRegister(instr->value());
  } else {
    context = UseRegister(instr->context());
    value = UseRegister(instr->value());
  }
  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}

LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
  LOperand* obj = UseRegisterAtStart(instr->object());
  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
}


LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), a0);
  LInstruction* result =
      DefineFixed(new(zone()) LLoadNamedGeneric(context, object), v0);
  return MarkAsCall(result, instr);
}


LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
    HLoadFunctionPrototype* instr) {
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}


LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
  return DefineAsRegister(new(zone()) LLoadRoot);
}

LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
  ASSERT(instr->key()->representation().IsSmiOrInteger32());
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LLoadKeyed* result = NULL;

  if (!instr->is_typed_elements()) {
    LOperand* obj = NULL;
    if (instr->representation().IsDouble()) {
      obj = UseRegister(instr->elements());
    } else {
      ASSERT(instr->representation().IsSmiOrTagged());
      obj = UseRegisterAtStart(instr->elements());
    }
    result = new(zone()) LLoadKeyed(obj, key);
  } else {
    ASSERT(
        (instr->representation().IsInteger32() &&
         !IsDoubleOrFloatElementsKind(elements_kind)) ||
        (instr->representation().IsDouble() &&
         IsDoubleOrFloatElementsKind(elements_kind)));
    LOperand* backing_store = UseRegister(instr->elements());
    result = new(zone()) LLoadKeyed(backing_store, key);
  }

  DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure
  // it has an environment.
  bool can_deoptimize = instr->RequiresHoleCheck() ||
      elements_kind == EXTERNAL_UINT32_ELEMENTS ||
      elements_kind == UINT32_ELEMENTS;
  return can_deoptimize ? AssignEnvironment(result) : result;
}


LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), a1);
  LOperand* key = UseFixed(instr->key(), a0);

  LInstruction* result =
      DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), v0);
  return MarkAsCall(result, instr);
}


LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
  if (!instr->is_typed_elements()) {
    ASSERT(instr->elements()->representation().IsTagged());
    bool needs_write_barrier = instr->NeedsWriteBarrier();
    LOperand* object = NULL;
    LOperand* val = NULL;
    LOperand* key = NULL;

    if (instr->value()->representation().IsDouble()) {
      object = UseRegisterAtStart(instr->elements());
      key = UseRegisterOrConstantAtStart(instr->key());
      val = UseRegister(instr->value());
    } else {
      ASSERT(instr->value()->representation().IsSmiOrTagged());
      if (needs_write_barrier) {
        object = UseTempRegister(instr->elements());
        val = UseTempRegister(instr->value());
        key = UseTempRegister(instr->key());
      } else {
        object = UseRegisterAtStart(instr->elements());
        val = UseRegisterAtStart(instr->value());
        key = UseRegisterOrConstantAtStart(instr->key());
      }
    }

    return new(zone()) LStoreKeyed(object, key, val);
  }

  ASSERT(
      (instr->value()->representation().IsInteger32() &&
       !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
      (instr->value()->representation().IsDouble() &&
       IsDoubleOrFloatElementsKind(instr->elements_kind())));
  ASSERT((instr->is_fixed_typed_array() &&
          instr->elements()->representation().IsTagged()) ||
         (instr->is_external() &&
          instr->elements()->representation().IsExternal()));
  LOperand* val = UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LOperand* backing_store = UseRegister(instr->elements());
  return new(zone()) LStoreKeyed(backing_store, key, val);
}
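
// When a tagged keyed store needs a write barrier, the elements, value and
// key are all requested as temp registers: the record-write code emitted
// after the store is free to clobber them. The double and no-barrier paths
// get by with the cheaper at-start policies.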

LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* obj = UseFixed(instr->object(), a2);
  LOperand* key = UseFixed(instr->key(), a1);
  LOperand* val = UseFixed(instr->value(), a0);

  ASSERT(instr->object()->representation().IsTagged());
  ASSERT(instr->key()->representation().IsTagged());
  ASSERT(instr->value()->representation().IsTagged());

  return MarkAsCall(
      new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
}

LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  LOperand* object = UseRegister(instr->object());
  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
    LOperand* new_map_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
    return result;
  } else {
    LOperand* context = UseFixed(instr->context(), cp);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, context, NULL);
    return AssignPointerMap(result);
  }
}
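
// A simple elements transition only rewrites the object's map word, so it
// just needs a scratch register for the new map. Any other transition (for
// example one that must copy into a different backing store) goes through a
// stub or runtime call, so it takes the context and a pointer map instead.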

LInstruction* LChunkBuilder::DoTrapAllocationMemento(
    HTrapAllocationMemento* instr) {
  LOperand* object = UseRegister(instr->object());
  LOperand* temp = TempRegister();
  LTrapAllocationMemento* result =
      new(zone()) LTrapAllocationMemento(object, temp);
  return AssignEnvironment(result);
}

LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool is_in_object = instr->access().IsInobject();
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  bool needs_write_barrier_for_map = instr->has_transition() &&
      instr->NeedsWriteBarrierForMap();

  LOperand* obj;
  if (needs_write_barrier) {
    obj = is_in_object
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  LOperand* val;
  if (needs_write_barrier || instr->field_representation().IsSmi()) {
    val = UseTempRegister(instr->value());
  } else if (instr->field_representation().IsDouble()) {
    val = UseRegisterAtStart(instr->value());
  } else {
    val = UseRegister(instr->value());
  }

  // We need a temporary register for the write barrier of the map field.
  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;

  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
  if (instr->field_representation().IsHeapObject()) {
    if (!instr->value()->type().IsHeapObject()) {
      return AssignEnvironment(result);
    }
  }
  return result;
}
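
// The value policy follows the field representation: smi stores and stores
// that need a write barrier want a clobberable temp, double stores only read
// the value at the start, and everything else just needs some register. The
// environment is attached only when the field requires a heap object but the
// value's type is not known to be one, since that check can deoptimize.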

LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* obj = UseFixed(instr->object(), a1);
  LOperand* val = UseFixed(instr->value(), a0);

  LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
  return MarkAsCall(result, instr);
}

LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), a1);
  LOperand* right = UseFixed(instr->right(), a0);
  return MarkAsCall(
      DefineFixed(new(zone()) LStringAdd(context, left, right), v0),
      instr);
}

LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
  LOperand* string = UseTempRegister(instr->string());
  LOperand* index = UseTempRegister(instr->index());
  LOperand* context = UseAny(instr->context());
  LStringCharCodeAt* result =
      new(zone()) LStringCharCodeAt(context, string, index);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}

LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
  LOperand* char_code = UseRegister(instr->value());
  LOperand* context = UseAny(instr->context());
  LStringCharFromCode* result =
      new(zone()) LStringCharFromCode(context, char_code);
  return AssignPointerMap(DefineAsRegister(result));
}

LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
  info()->MarkAsDeferredCalling();
  LOperand* context = UseAny(instr->context());
  LOperand* size = instr->size()->IsConstant()
      ? UseConstant(instr->size())
      : UseTempRegister(instr->size());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
  return AssignPointerMap(DefineAsRegister(result));
}
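
// Allocation can fall back to a runtime call when the inline path fails,
// hence MarkAsDeferredCalling and the pointer map. A constant size is encoded
// directly; otherwise the size lives in a temp register that the deferred
// slow path may reuse.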

LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(
      DefineFixed(new(zone()) LRegExpLiteral(context), v0), instr);
}

LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(
      DefineFixed(new(zone()) LFunctionLiteral(context), v0), instr);
}

LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  ASSERT(argument_count_ == 0);
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}

LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
  LParameter* result = new(zone()) LParameter;
  if (instr->kind() == HParameter::STACK_PARAMETER) {
    int spill_index = chunk()->GetParameterStackSlot(instr->index());
    return DefineAsSpilled(result, spill_index);
  } else {
    ASSERT(info()->IsStub());
    CodeStubInterfaceDescriptor* descriptor =
        info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
    int index = static_cast<int>(instr->index());
    Register reg = descriptor->GetParameterRegister(index);
    return DefineFixed(result, reg);
  }
}
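
// Ordinary stack parameters already live in caller-provided stack slots, so
// they are defined as spilled. For code stubs the interface descriptor
// dictates the fixed register in which each parameter arrives.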

LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  // Use an index that corresponds to the location in the unoptimized frame,
  // which the optimized frame will subsume.
  int env_index = instr->index();
  int spill_index = 0;
  if (instr->environment()->is_parameter_index(env_index)) {
    spill_index = chunk()->GetParameterStackSlot(env_index);
  } else {
    spill_index = env_index - instr->environment()->first_local_index();
    if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
      Abort(kTooManySpillSlotsNeededForOSR);
      spill_index = 0;
    }
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
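
// OSR values mirror the layout of the unoptimized frame: parameters map onto
// their parameter stack slots and locals onto spill slots counted from the
// first local index; compilation aborts if that would exceed the fixed spill
// slot limit.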

LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), v0), instr);
}

LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object: arguments.length and
  // element access are supported directly on stack arguments, and any real
  // use of the arguments object causes a bailout, so this value is never
  // needed in optimized code.
  return NULL;
}

LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
  instr->ReplayEnvironment(current_block_->last_environment());

  // There are no real uses of a captured object.
  return NULL;
}

LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* args = UseRegister(instr->arguments());
  LOperand* length = UseRegisterOrConstantAtStart(instr->length());
  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}

LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
  LOperand* object = UseFixed(instr->value(), a0);
  LToFastProperties* result = new(zone()) LToFastProperties(object);
  return MarkAsCall(DefineFixed(result, v0), instr);
}

LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), a0));
  return MarkAsCall(DefineFixed(result, v0), instr);
}

LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}

LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
    HIsConstructCallAndBranch* instr) {
  return new(zone()) LIsConstructCallAndBranch(TempRegister());
}

LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  instr->ReplayEnvironment(current_block_->last_environment());
  return NULL;
}

LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
  if (instr->is_function_entry()) {
    LOperand* context = UseFixed(instr->context(), cp);
    return MarkAsCall(new(zone()) LStackCheck(context), instr);
  } else {
    ASSERT(instr->is_backwards_branch());
    LOperand* context = UseAny(instr->context());
    return AssignEnvironment(
        AssignPointerMap(new(zone()) LStackCheck(context)));
  }
}
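
// A function-entry stack check is a real call into the stack guard (hence
// MarkAsCall with the context in cp). A back-edge check keeps the fast path
// inline; the environment and pointer map cover its slow path, which may call
// out and deoptimize when the interrupt limit has been hit.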

LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->inlining_kind());
  // Only replay binding of the arguments object if it wasn't removed from
  // the graph.
  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
    inner->Bind(instr->arguments_var(), instr->arguments_object());
  }
  inner->set_entry(instr);
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
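
// Entering an inlined function emits no code of its own; it only builds the
// inner HEnvironment (a copy of the caller's, padded with undefined) so that
// later deopt points can reconstruct the inlined frames.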

LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (env->entry()->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    ASSERT(instr->argument_delta() == -argument_count);
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}

LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->enumerable(), a0);
  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
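
// CAN_DEOPTIMIZE_EAGERLY tells MarkAsCall that this runtime call may itself
// trigger deoptimization, so the environment is recorded for an eager bailout
// at the call rather than only the usual lazy one.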

LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
  LOperand* map = UseRegister(instr->map());
  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
}

LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* map = UseRegisterAtStart(instr->map());
  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
}

LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
  LOperand* object = UseRegister(instr->object());
  LOperand* index = UseRegister(instr->index());
  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
}