#define DEFINE_COMPILE(type)                            \
  void L##type::CompileToNative(LCodeGen* generator) {  \
    generator->Do##type(this);                          \
  }
LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
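// Every concrete instruction in LITHIUM_CONCRETE_INSTRUCTION_LIST gets its
// CompileToNative stamped out by the macro above.  For LGoto, for example,
// the expansion is equivalent to:
//
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }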
LOsrEntry::LOsrEntry() {
  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
    register_spills_[i] = NULL;
  }
  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
    double_register_spills_[i] = NULL;
  }
}
void LOsrEntry::MarkSpilledRegister(int allocation_index,
                                    LOperand* spill_operand) {
  ASSERT(spill_operand->IsStackSlot());
  ASSERT(register_spills_[allocation_index] == NULL);
  register_spills_[allocation_index] = spill_operand;
}
void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
  }
}
void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
                                          LOperand* spill_operand) {
  ASSERT(spill_operand->IsDoubleStackSlot());
  ASSERT(double_register_spills_[allocation_index] == NULL);
  double_register_spills_[allocation_index] = spill_operand;
}
void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());

  PrintOutputOperandTo(stream);

  PrintDataTo(stream);

  if (HasEnvironment()) {
    stream->Add(" ");
    environment()->PrintTo(stream);
  }

  if (HasPointerMap()) {
    stream->Add(" ");
    pointer_map()->PrintTo(stream);
  }
}
template<int R, int I, int T>
void LTemplateInstruction<R, I, T>::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  for (int i = 0; i < inputs_.length(); i++) {
    if (i > 0) stream->Add(" ");
    inputs_[i]->PrintTo(stream);
  }
}
void LLabel::PrintDataTo(StringStream* stream) {
  LGap::PrintDataTo(stream);
  LLabel* rep = replacement();
  if (rep != NULL) {
    stream->Add(" Dead block replaced with B%d", rep->block_id());
  }
}
bool LGap::IsRedundant() const {
  for (int i = 0; i < 4; i++) {
    if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }
  return true;
}
void LGap::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < 4; i++) {
    stream->Add("(");
    if (parallel_moves_[i] != NULL) {
      parallel_moves_[i]->PrintDataTo(stream);
    }
    stream->Add(") ");
  }
}
const char* LArithmeticD::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-d";
    case Token::SUB: return "sub-d";
    case Token::MUL: return "mul-d";
    case Token::DIV: return "div-d";
    case Token::MOD: return "mod-d";
    default:
      UNREACHABLE();
      return NULL;
  }
}
const char* LArithmeticT::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-t";
    case Token::SUB: return "sub-t";
    case Token::MUL: return "mul-t";
    case Token::MOD: return "mod-t";
    case Token::DIV: return "div-t";
    case Token::BIT_AND: return "bit-and-t";
    case Token::BIT_OR: return "bit-or-t";
    case Token::BIT_XOR: return "bit-xor-t";
    case Token::SHL: return "sll-t";
    case Token::SAR: return "sra-t";
    case Token::SHR: return "srl-t";
    default:
      UNREACHABLE();
      return NULL;
  }
}
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  InputAt(0)->PrintTo(stream);
  InputAt(1)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  InputAt(0)->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(),
              false_block_id());
}
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if typeof ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" == \"%s\" then B%d else B%d",
              *hydrogen()->type_literal()->ToCString(),
              true_block_id(), false_block_id());
}
void LCallConstantFunction::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}


void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
  stream->Add("/%s ", hydrogen()->OpName());
  InputAt(0)->PrintTo(stream);
}
void LInvokeFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


void LCallKeyed::PrintDataTo(StringStream* stream) {
  stream->Add("[a2] #%d / ", arity());
}
void LCallNamed::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


void LCallGlobal::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}
void LCallNew::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);
  stream->Add(" length ");
  length()->PrintTo(stream);
  stream->Add(" index ");
  index()->PrintTo(stream);
}
void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
  elements()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()) {
}
int LChunk::GetNextSpillIndex(bool is_double) {
  // Skip a slot if for a double-width slot.
  if (is_double) spill_slot_count_++;
  return spill_slot_count_++;
}
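// A double is twice the width of a tagged value, so a double spill request
// consumes two consecutive indexes and returns the second one.  E.g. with
// spill_slot_count_ == 3, GetNextSpillIndex(true) burns slots 3 and 4 and
// returns 4, leaving the counter at 5.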
LOperand* LChunk::GetNextSpillSlot(bool is_double)  {
  int index = GetNextSpillIndex(is_double);
  if (is_double) {
    return LDoubleStackSlot::Create(index, zone());
  } else {
    return LStackSlot::Create(index, zone());
  }
}
void LChunk::MarkEmptyBlocks() {
  HPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }

        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
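// Every instruction is paired with an LGap so the register allocator always
// has a slot to insert parallel moves: [instr, gap] for normal instructions
// and [gap, instr] for control instructions.  Either way, `index` records
// the position of the instruction itself, which is what the pointer map
// needs.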
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}
int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->scope()->num_parameters() - 1;
  ASSERT(result < 0);
  return result;
}
int LChunk::ParameterAt(int index) {
  ASSERT(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}
Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}
LChunk* LChunkBuilder::Build() {
  ASSERT(is_unused());
  chunk_ = new(zone()) LChunk(info(), graph());
  HPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* next = NULL;
    if (i < blocks->length() - 1) next = blocks->at(i + 1);
    DoBasicBlock(blocks->at(i), next);
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}
void LChunkBuilder::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LChunk building in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg));
}


LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg));
}
LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}


LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
  return Use(value, ToUnallocated(reg));
}
LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
  return Use(value,
             new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
                                      LUnallocated::USED_AT_START));
}


LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}


LOperand* LChunkBuilder::Use(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}


LOperand* LChunkBuilder::UseAtStart(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
                                             LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value);
}


LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseAtStart(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegister(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegisterAtStart(value);
}


LOperand* LChunkBuilder::UseAny(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
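// Summary of the use factories above, as seen by the register allocator:
//   Use(v)                    - no constraint (NONE).
//   UseAtStart(v)             - no constraint; the value is only needed at
//                               the start of the instruction, so it may
//                               share a register with the result.
//   UseRegister(v) / AtStart  - must be in a register.
//   UseTempRegister(v)        - a register the instruction may overwrite.
//   UseFixed(v, reg)          - pinned to one specific register.
//   ...OrConstant variants    - skip allocation entirely for constants.
//   UseAny(v)                 - register or stack slot, allocator's choice.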
LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}


template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateInstruction<1, I, T>* instr, int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}


template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}


template<int I, int T>
LInstruction* LChunkBuilder::DefineFixed(
    LTemplateInstruction<1, I, T>* instr, Register reg) {
  return Define(instr, ToUnallocated(reg));
}


template<int I, int T>
LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateInstruction<1, I, T>* instr, DoubleRegister reg) {
  return Define(instr, ToUnallocated(reg));
}
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator));
  return instr;
}
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  if (hinstr->HasObservableSideEffects()) {
    ASSERT(hinstr->next()->IsSimulate());
    HSimulate* sim = HSimulate::cast(hinstr->next());
    ASSERT(instruction_pending_deoptimization_environment_ == NULL);
    ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
    instruction_pending_deoptimization_environment_ = instr;
    pending_deoptimization_ast_id_ = sim->ast_id();
  }

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}
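// Note the asymmetry above: a call with observable side effects does not get
// an environment directly.  Instead the instruction is parked in
// instruction_pending_deoptimization_environment_ and picked up again by
// DoSimulate below, which attaches the lazy-bailout environment recorded for
// the matching ast id.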
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
  return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
  operand->set_virtual_register(allocator_->GetVirtualRegister());
  if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
  return operand;
}
LOperand* LChunkBuilder::FixedTemp(Register reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}


LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  return new(zone()) LLabel(instr->block());
}


LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}


LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsTagged()) {
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    LOperand* left = UseFixed(instr->left(), a1);
    LOperand* right = UseFixed(instr->right(), a0);
    LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }

  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().IsInteger32());
  ASSERT(instr->right()->representation().IsInteger32());
  LOperand* left = UseRegisterAtStart(instr->left());

  HValue* right_value = instr->right();
  LOperand* right = NULL;
  int constant_value = 0;
  if (right_value->IsConstant()) {
    HConstant* constant = HConstant::cast(right_value);
    right = chunk_->DefineConstantOperand(constant);
    constant_value = constant->Integer32Value() & 0x1f;
  } else {
    right = UseRegisterAtStart(right_value);
  }

  // Shift operations can only deoptimize if we do a logical shift
  // by 0 and the result cannot be truncated to int32.
  bool may_deopt = (op == Token::SHR && constant_value == 0);
  bool does_deopt = false;
  if (may_deopt) {
    for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
      if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
        does_deopt = true;
        break;
      }
    }
  }

  LInstruction* result =
      DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
  return does_deopt ? AssignEnvironment(result) : result;
}
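// The 0x1f mask mirrors both JS shift semantics and the MIPS shift
// instructions, which use only the low five bits of the shift amount.  The
// one shift that can deoptimize is "x >>> 0": it is a no-op bit-wise but
// reinterprets the value as unsigned, so a negative int32 input yields a
// result above kMaxInt unless every use truncates it back to int32.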
LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->left()->representation().IsDouble());
  ASSERT(instr->right()->representation().IsDouble());
  ASSERT(op != Token::MOD);
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
  return DefineAsRegister(result);
}
LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(op == Token::ADD ||
         op == Token::DIV ||
         op == Token::MOD ||
         op == Token::MUL ||
         op == Token::SUB);
  HValue* left = instr->left();
  HValue* right = instr->right();
  ASSERT(left->representation().IsTagged());
  ASSERT(right->representation().IsTagged());
  LOperand* left_operand = UseFixed(left, a1);
  LOperand* right_operand = UseFixed(right, a0);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, v0), instr);
}
void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
  ASSERT(is_building());
  current_block_ = block;
  next_block_ = next_block;
  if (block->IsStartBlock()) {
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);
    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
          pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      last_environment->SetValueAt(phi->merged_index(), phi);
    }
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      last_environment->SetValueAt(block->deleted_phis()->at(i),
                                   graph_->GetConstantUndefined());
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while (current != NULL && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  next_block_ = NULL;
  current_block_ = NULL;
}
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;
  if (current->has_position()) position_ = current->position();
  LInstruction* instr = current->CompileToLithium(this);

  if (instr != NULL) {
    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    instr->set_hydrogen_value(current);
    chunk_->AddInstruction(instr, current_block_);
  }
  current_instruction_ = old_current;
}
LEnvironment* LChunkBuilder::CreateEnvironment(
    HEnvironment* hydrogen_env,
    int* argument_index_accumulator) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
  int ast_id = hydrogen_env->ast_id();
  ASSERT(ast_id != AstNode::kNoNumber ||
         hydrogen_env->frame_type() != JS_FUNCTION);
  int value_count = hydrogen_env->length();
  LEnvironment* result = new(zone()) LEnvironment(
      hydrogen_env->closure(),
      hydrogen_env->frame_type(),
      ast_id,
      hydrogen_env->parameter_count(),
      argument_count_,
      value_count,
      outer);
  int argument_index = *argument_index_accumulator;
  for (int i = 0; i < value_count; ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    LOperand* op = NULL;
    if (value->IsArgumentsObject()) {
      op = NULL;
    } else if (value->IsPushArgument()) {
      op = new(zone()) LArgument(argument_index++);
    } else {
      op = UseAny(value);
    }
    result->AddValue(op, value->representation());
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
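// Environments are created outer-first: the recursion above bottoms out at
// the outermost frame, and argument_index_accumulator threads one running
// index for pushed arguments through the whole chain of inlined frames.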
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
  return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}


LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  HValue* value = instr->value();
  if (value->EmitAtUses()) {
    HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
        ? instr->FirstSuccessor()
        : instr->SecondSuccessor();
    return new(zone()) LGoto(successor->block_id());
  }

  LBranch* result = new(zone()) LBranch(UseRegister(value));
  // Tagged values that are not known smis or booleans require a
  // deoptimization environment.
  Representation rep = value->representation();
  HType type = value->type();
  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
    return AssignEnvironment(result);
  }
  return result;
}
LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  return new(zone()) LCmpMapAndBranch(value, temp);
}


LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
  return DefineAsRegister(
      new(zone()) LArgumentsLength(UseRegister(length->value())));
}


LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
  return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
  LInstanceOf* result =
      new(zone()) LInstanceOf(UseFixed(instr->left(), a0),
                              UseFixed(instr->right(), a1));
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
    HInstanceOfKnownGlobal* instr) {
  LInstanceOfKnownGlobal* result =
      new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), a0),
                                         FixedTemp(t0));
  return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
  LOperand* receiver = UseRegisterAtStart(instr->receiver());
  LOperand* function = UseRegisterAtStart(instr->function());
  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
  return AssignEnvironment(DefineSameAsFirst(result));
}
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
  LOperand* function = UseFixed(instr->function(), a1);
  LOperand* receiver = UseFixed(instr->receiver(), a0);
  LOperand* length = UseFixed(instr->length(), a2);
  LOperand* elements = UseFixed(instr->elements(), a3);
  LApplyArguments* result = new(zone()) LApplyArguments(function,
                                                        receiver,
                                                        length,
                                                        elements);
  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
  ++argument_count_;
  LOperand* argument = Use(instr->argument());
  return new(zone()) LPushArgument(argument);
}


LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
  return instr->HasNoUses()
      ? NULL
      : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
  return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}


LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LOuterContext(context));
}
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
  return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}


LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LGlobalObject(context));
}


LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
  LOperand* global_object = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
    HCallConstantFunction* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, v0), instr);
}


LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
  LOperand* function = UseFixed(instr->function(), a1);
  argument_count_ -= instr->argument_count();
  LInvokeFunction* result = new(zone()) LInvokeFunction(function);
  return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  BuiltinFunctionId op = instr->op();
  if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
    LOperand* input = UseFixedDouble(instr->value(), f4);
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
    return MarkAsCall(DefineFixedDouble(result, f4), instr);
  } else if (op == kMathPowHalf) {
    // Input cannot be the same as the result, see LCodeGen::DoMathPowHalf.
    LOperand* input = UseFixedDouble(instr->value(), f8);
    LOperand* temp = FixedTemp(f6);
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
    return DefineFixedDouble(result, f4);
  } else {
    LOperand* input = UseRegisterAtStart(instr->value());
    LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
    switch (op) {
      case kMathAbs:
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      case kMathFloor:
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      case kMathSqrt:
        return DefineAsRegister(result);
      case kMathRound:
        return AssignEnvironment(DefineAsRegister(result));
      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
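// f0..f22 above are MIPS FPU registers.  The transcendental ops (log, sin,
// cos, tan) are emitted as calls, so their input and result are pinned to
// fixed double registers; kMathPowHalf pins its operands as well because the
// code generator needs input and result in different known registers.  The
// remaining unary ops stay in allocator-chosen registers.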
LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
  ASSERT(instr->key()->representation().IsTagged());
  argument_count_ -= instr->argument_count();
  LOperand* key = UseFixed(instr->key(), a2);
  return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), v0), instr);
}


LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallNamed, v0), instr);
}


LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, v0), instr);
}


LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, v0), instr);
}


LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
  LOperand* constructor = UseFixed(instr->constructor(), a1);
  argument_count_ -= instr->argument_count();
  LCallNew* result = new(zone()) LCallNew(constructor);
  return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* function = UseFixed(instr->function(), a1);
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), v0),
                    instr);
}


LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, v0), instr);
}
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}


LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}


LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
    return DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    ASSERT(instr->representation().IsTagged());
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    LOperand* left = UseFixed(instr->left(), a1);
    LOperand* right = UseFixed(instr->right(), a0);
    LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }
}
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
  ASSERT(instr->value()->representation().IsInteger32());
  ASSERT(instr->representation().IsInteger32());
  if (instr->HasNoUses()) return NULL;
  LOperand* value = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LBitNotI(value));
}
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else if (instr->representation().IsInteger32()) {
    // TODO(1042) The fixed register allocation
    // is needed because we call TypeRecordingBinaryOpStub from
    // the generated code, which requires registers a0
    // and a1 to be used. We should remove that
    // when we provide a native implementation.
    LOperand* dividend = UseFixed(instr->left(), a0);
    LOperand* divisor = UseFixed(instr->right(), a1);
    return AssignEnvironment(AssignPointerMap(
        DefineFixed(new(zone()) LDivI(dividend, divisor), v0)));
  } else {
    return DoArithmeticT(Token::DIV, instr);
  }
}


LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  UNIMPLEMENTED();
  return NULL;
}
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LModI* mod;
    if (instr->HasPowerOf2Divisor()) {
      ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
      LOperand* value = UseRegisterAtStart(instr->left());
      mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
    } else {
      LOperand* dividend = UseRegister(instr->left());
      LOperand* divisor = UseRegister(instr->right());
      mod = new(zone()) LModI(dividend,
                              divisor,
                              TempRegister(),
                              FixedTemp(f20),
                              FixedTemp(f22));
    }

    if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
        instr->CheckFlag(HValue::kCanBeDivByZero)) {
      return AssignEnvironment(DefineAsRegister(mod));
    } else {
      return DefineAsRegister(mod);
    }
  } else if (instr->representation().IsTagged()) {
    return DoArithmeticT(Token::MOD, instr);
  } else {
    ASSERT(instr->representation().IsDouble());
    // We call a C function for double modulo. It can't trigger a GC.
    // We need to use fixed result register for the call.
    LOperand* left = UseFixedDouble(instr->left(), f2);
    LOperand* right = UseFixedDouble(instr->right(), f4);
    LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
    return MarkAsCall(DefineFixedDouble(result, f2), instr);
  }
}
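// The HasPowerOf2Divisor() path works because x % 2^k only depends on the
// low k bits of x; e.g. x % 8 reduces to masking the low three bits, with
// the sign of a negative dividend fixed up in the generated code, so no
// general-purpose division and no extra temps are needed.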
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LOperand* left;
    LOperand* right = UseOrConstant(instr->MostConstantOperand());
    LOperand* temp = NULL;
    if (instr->CheckFlag(HValue::kBailoutOnMinusZero) &&
        (instr->CheckFlag(HValue::kCanOverflow) ||
         !right->IsConstantOperand())) {
      left = UseRegister(instr->LeastConstantOperand());
      temp = TempRegister();
    } else {
      left = UseRegisterAtStart(instr->LeastConstantOperand());
    }
    LMulI* mul = new(zone()) LMulI(left, right, temp);
    if (instr->CheckFlag(HValue::kCanOverflow) ||
        instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
      AssignEnvironment(mul);
    }
    return DefineAsRegister(mul);

  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);

  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}
LInstruction* LChunkBuilder::DoSub(HSub* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LSubI* sub = new(zone()) LSubI(left, right);
    LInstruction* result = DefineAsRegister(sub);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::SUB, instr);
  } else {
    return DoArithmeticT(Token::SUB, instr);
  }
}
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::ADD, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::ADD, instr);
  }
}
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), f2);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), f4) :
      UseFixed(instr->right(), a2);
  LPower* result = new(zone()) LPower(left, right);
  return MarkAsCall(DefineFixedDouble(result, f0),
                    instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->global_object()->representation().IsTagged());
  LOperand* global_object = UseFixed(instr->global_object(), a0);
  LRandom* result = new(zone()) LRandom(global_object);
  return MarkAsCall(DefineFixedDouble(result, f0), instr);
}
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* left = UseFixed(instr->left(), a1);
  LOperand* right = UseFixed(instr->right(), a0);
  LCmpT* result = new(zone()) LCmpT(left, right);
  return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoCompareIDAndBranch(
    HCompareIDAndBranch* instr) {
  Representation r = instr->GetInputRepresentation();
  if (r.IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return new(zone()) LCmpIDAndBranch(left, right);
  } else {
    ASSERT(r.IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    return new(zone()) LCmpIDAndBranch(left, right);
  }
}
LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
    HCompareObjectEqAndBranch* instr) {
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  return new(zone()) LCmpObjectEqAndBranch(left, right);
}


LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
    HCompareConstantEqAndBranch* instr) {
  return new(zone()) LCmpConstantEqAndBranch(
      UseRegisterAtStart(instr->value()));
}


LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
                                        temp);
}


LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
                                        temp);
}


LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
    HIsUndetectableAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsUndetectableAndBranch(
      UseRegisterAtStart(instr->value()), TempRegister());
}


LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* left = UseFixed(instr->left(), a1);
  LOperand* right = UseFixed(instr->right(), a0);
  LStringCompareAndBranch* result =
      new(zone()) LStringCompareAndBranch(left, right);
  return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LHasInstanceTypeAndBranch(value);
}


LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
    HGetCachedArrayIndex* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());

  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}


LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
    HHasCachedArrayIndexAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LHasCachedArrayIndexAndBranch(
      UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
                                           TempRegister());
}


LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
  LOperand* array = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LJSArrayLength(array));
}


LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
    HFixedArrayBaseLength* instr) {
  LOperand* array = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
  LOperand* object = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LElementsKind(object));
}


LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
  LOperand* object = UseRegister(instr->value());
  LValueOf* result = new(zone()) LValueOf(object, TempRegister());
  return DefineAsRegister(result);
}
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
  LOperand* object = UseFixed(instr->value(), a0);
  LDateField* result =
      new(zone()) LDateField(object, FixedTemp(a1), instr->index());
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
  LOperand* value = UseRegisterAtStart(instr->index());
  LOperand* length = UseRegister(instr->length());
  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception).  There is nothing specific to do.
  return NULL;
}


LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
  LOperand* value = UseFixed(instr->value(), a0);
  return MarkAsCall(new(zone()) LThrow(value), instr);
}


LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}


LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      LOperand* value = UseRegister(instr->value());
      LNumberUntagD* res = new(zone()) LNumberUntagD(value);
      return AssignEnvironment(DefineAsRegister(res));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegisterAtStart(instr->value());
      LInstruction* res = NULL;
      if (instr->value()->type().IsSmi()) {
        res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
      } else {
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
                                                      : NULL;
        LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(f22)
                                                      : NULL;
        res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
                                                       temp1,
                                                       temp2,
                                                       temp3));
        res = AssignEnvironment(res);
      }
      return res;
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();

      // Make sure that the temp and result_temp registers are
      // different.
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
      Define(result, result_temp);
      return AssignPointerMap(result);
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
      LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
      return AssignEnvironment(DefineAsRegister(res));
    }
  } else if (from.IsInteger32()) {
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegisterAtStart(val);
      if (val->HasRange() && val->range()->IsInSmiRange()) {
        return DefineAsRegister(new(zone()) LSmiTag(value));
      } else {
        LNumberTagI* result = new(zone()) LNumberTagI(value);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      }
    } else {
      ASSERT(to.IsDouble());
      LOperand* value = Use(instr->value());
      return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
    }
  }
  UNREACHABLE();
  return NULL;
}
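// DoChange covers every edge of the representation lattice used here:
//   tagged -> double   LNumberUntagD        (can deopt on a non-number)
//   tagged -> int32    LSmiUntag/LTaggedToI (can deopt unless known smi)
//   double -> tagged   LNumberTagD          (may allocate, needs pointer map)
//   double -> int32    LDoubleToI           (can deopt if truncation is
//                                            not allowed)
//   int32  -> tagged   LSmiTag/LNumberTagI  (LNumberTagI may allocate)
//   int32  -> double   LInteger32ToDouble   (always safe)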
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}


LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LInstruction* result = new(zone()) LCheckInstanceType(value);
  return AssignEnvironment(result);
}


LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
  return AssignEnvironment(result);
}


LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckSmi(value));
}


LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckFunction(value));
}


LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LInstruction* result = new(zone()) LCheckMaps(value);
  return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new(zone()) LClampIToUint8(reg));
  } else {
    ASSERT(input_rep.IsTagged());
    // Register allocator doesn't (yet) support allocation of double
    // temps. Reserve f22 explicitly.
    LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
    return AssignEnvironment(DefineAsRegister(result));
  }
}
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
  return new(zone()) LReturn(UseFixed(instr->value(), v0));
}


LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsInteger32()) {
    return DefineAsRegister(new(zone()) LConstantI);
  } else if (r.IsDouble()) {
    return DefineAsRegister(new(zone()) LConstantD);
  } else if (r.IsTagged()) {
    return DefineAsRegister(new(zone()) LConstantT);
  } else {
    UNREACHABLE();
    return NULL;
  }
}
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
  return instr->RequiresHoleCheck()
      ? AssignEnvironment(DefineAsRegister(result))
      : DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
  LOperand* global_object = UseFixed(instr->global_object(), a0);
  LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
  LOperand* value = UseRegister(instr->value());
  // Use a temp to check the value in the cell in the case where we perform
  // a hole check.
  return instr->RequiresHoleCheck()
      ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
      : new(zone()) LStoreGlobalCell(value, NULL);
}


LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
  LOperand* global_object = UseFixed(instr->global_object(), a1);
  LOperand* value = UseFixed(instr->value(), a0);
  LStoreGlobalGeneric* result =
      new(zone()) LStoreGlobalGeneric(global_object, value);
  return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  LInstruction* result =
      DefineAsRegister(new(zone()) LLoadContextSlot(context));
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}


LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* context;
  LOperand* value;
  if (instr->NeedsWriteBarrier()) {
    context = UseTempRegister(instr->context());
    value = UseTempRegister(instr->value());
  } else {
    context = UseRegister(instr->context());
    value = UseRegister(instr->value());
  }
  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
  return DefineAsRegister(
      new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
}


LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
    HLoadNamedFieldPolymorphic* instr) {
  ASSERT(instr->representation().IsTagged());
  if (instr->need_generic()) {
    LOperand* obj = UseFixed(instr->object(), a0);
    LLoadNamedFieldPolymorphic* result =
        new(zone()) LLoadNamedFieldPolymorphic(obj);
    return MarkAsCall(DefineFixed(result, v0), instr);
  } else {
    LOperand* obj = UseRegisterAtStart(instr->object());
    LLoadNamedFieldPolymorphic* result =
        new(zone()) LLoadNamedFieldPolymorphic(obj);
    return AssignEnvironment(DefineAsRegister(result));
  }
}
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
  LOperand* object = UseFixed(instr->object(), a0);
  LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), v0);
  return MarkAsCall(result, instr);
}


LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
    HLoadFunctionPrototype* instr) {
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}


LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LLoadElements(input));
}


LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
    HLoadExternalArrayPointer* instr) {
  LOperand* input = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
    HLoadKeyedFastElement* instr) {
  ASSERT(instr->representation().IsTagged());
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* obj = UseRegisterAtStart(instr->object());
  LOperand* key = UseRegisterAtStart(instr->key());
  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
  return DefineAsRegister(result);
}


LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
    HLoadKeyedFastDoubleElement* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* elements = UseTempRegister(instr->elements());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LLoadKeyedFastDoubleElement* result =
      new(zone()) LLoadKeyedFastDoubleElement(elements, key);
  return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
    HLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Representation representation(instr->representation());
  ASSERT(
      (representation.IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (representation.IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LLoadKeyedSpecializedArrayElement* result =
      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
  LInstruction* load_instr = DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure it
  // has an environment.
  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
      AssignEnvironment(load_instr) : load_instr;
}


LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
  LOperand* object = UseFixed(instr->object(), a1);
  LOperand* key = UseFixed(instr->key(), a0);

  LInstruction* result =
      DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), v0);
  return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
    HStoreKeyedFastElement* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  ASSERT(instr->value()->representation().IsTagged());
  ASSERT(instr->object()->representation().IsTagged());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* obj = UseTempRegister(instr->object());
  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegisterAtStart(instr->value());
  LOperand* key = needs_write_barrier
      ? UseTempRegister(instr->key())
      : UseRegisterOrConstantAtStart(instr->key());
  return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
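// When the store needs a write barrier, the value and key are forced into
// writable registers so the barrier sequence emitted after the store is free
// to clobber them; without a barrier, cheaper use-at-start operands (or a
// constant key) suffice.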
LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
    HStoreKeyedFastDoubleElement* instr) {
  ASSERT(instr->value()->representation().IsDouble());
  ASSERT(instr->elements()->representation().IsTagged());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* elements = UseRegisterAtStart(instr->elements());
  LOperand* val = UseTempRegister(instr->value());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());

  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}


LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
    HStoreKeyedSpecializedArrayElement* instr) {
  Representation representation(instr->value()->representation());
  ElementsKind elements_kind = instr->elements_kind();
  ASSERT(
      (representation.IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (representation.IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->external_pointer()->representation().IsExternal());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* external_pointer = UseRegister(instr->external_pointer());
  bool val_is_temp_register =
      elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
      elements_kind == EXTERNAL_FLOAT_ELEMENTS;
  LOperand* val = val_is_temp_register
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstant(instr->key());

  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
                                                        key,
                                                        val);
}
LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
  LOperand* obj = UseFixed(instr->object(), a2);
  LOperand* key = UseFixed(instr->key(), a1);
  LOperand* val = UseFixed(instr->value(), a0);

  ASSERT(instr->object()->representation().IsTagged());
  ASSERT(instr->key()->representation().IsTagged());
  ASSERT(instr->value()->representation().IsTagged());

  return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
}
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  ElementsKind from_kind = instr->original_map()->elements_kind();
  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
    LOperand* object = UseRegister(instr->object());
    LOperand* new_map_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
    return DefineSameAsFirst(result);
  } else {
    LOperand* object = UseFixed(instr->object(), a0);
    LOperand* fixed_object_reg = FixedTemp(a2);
    LOperand* new_map_reg = FixedTemp(a3);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object,
                                            new_map_reg,
                                            fixed_object_reg);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }
}
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
      instr->NeedsWriteBarrierForMap();

  LOperand* obj;
  if (needs_write_barrier) {
    obj = instr->is_in_object()
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());

  // We need a temporary register for write barrier of the map field.
  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;

  return new(zone()) LStoreNamedField(obj, val, temp);
}
LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
  LOperand* obj = UseFixed(instr->object(), a1);
  LOperand* val = UseFixed(instr->value(), a0);

  LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
  return MarkAsCall(result, instr);
}


LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), v0),
                    instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
  LOperand* string = UseTempRegister(instr->string());
  LOperand* index = UseTempRegister(instr->index());
  LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}


LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
  LOperand* char_code = UseRegister(instr->value());
  LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
  return AssignPointerMap(DefineAsRegister(result));
}


LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
  LOperand* string = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LStringLength(string));
}


LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
  LAllocateObject* result =
      new(zone()) LAllocateObject(TempRegister(), TempRegister());
  return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
  return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, v0), instr);
}


LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
  return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, v0), instr);
}


LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
  return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, v0), instr);
}


LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
  return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, v0), instr);
}


LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
  return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
  LOperand* object = UseFixed(instr->object(), a0);
  LOperand* key = UseFixed(instr->key(), a1);
  LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}


LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
  int spill_index = chunk()->GetParameterStackSlot(instr->index());
  return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
  if (spill_index > LUnallocated::kMaxFixedIndex) {
    Abort("Too many spill slots needed for OSR");
    spill_index = 0;
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}


LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallStub, v0), instr);
}
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used.
  return NULL;
}


LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
  LOperand* arguments = UseRegister(instr->arguments());
  LOperand* length = UseTempRegister(instr->length());
  LOperand* index = UseRegister(instr->index());
  LAccessArgumentsAt* result =
      new(zone()) LAccessArgumentsAt(arguments, length, index);
  return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
  LOperand* object = UseFixed(instr->value(), a0);
  LToFastProperties* result = new(zone()) LToFastProperties(object);
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
  LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), a0));
  return MarkAsCall(DefineFixed(result, v0), instr);
}


LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}


LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
    HIsConstructCallAndBranch* instr) {
  return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  HEnvironment* env = current_block_->last_environment();
  ASSERT(env != NULL);

  env->set_ast_id(instr->ast_id());

  env->Drop(instr->pop_count());
  for (int i = 0; i < instr->values()->length(); ++i) {
    HValue* value = instr->values()->at(i);
    if (instr->HasAssignedIndexAt(i)) {
      env->Bind(instr->GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }

  // If there is an instruction pending deoptimization environment create a
  // lazy bailout instruction to capture the environment.
  if (pending_deoptimization_ast_id_ == instr->ast_id()) {
    LInstruction* result = new(zone()) LLazyBailout;
    result = AssignEnvironment(result);
    // Store the lazy deopt environment with the instruction if needed.
    instruction_pending_deoptimization_environment_->
        SetDeferredLazyDeoptimizationEnvironment(result->environment());
    instruction_pending_deoptimization_environment_ = NULL;
    pending_deoptimization_ast_id_ = AstNode::kNoNumber;
    return result;
  }

  return NULL;
}
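// This is the other half of MarkAsCall: when the simulate following a
// side-effecting call is reached, the LLazyBailout created here captures the
// after-call environment, and the pending instruction recorded by MarkAsCall
// is handed that environment for lazy deoptimization.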

LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
  if (instr->is_function_entry()) {
    return MarkAsCall(new(zone()) LStackCheck, instr);
  } else {
    ASSERT(instr->is_backwards_branch());
    return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
  }
}
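
// Two flavors of stack check: at function entry the check is an outright
// call (hence MarkAsCall), while at a loop back edge it only needs a
// pointer map and an environment so the check can bail out to unoptimized
// code when it triggers.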

LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->call_kind(),
                                               instr->is_construct());
  if (instr->arguments_var() != NULL) {
    inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
  }
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
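
// Entering an inlined call produces no instruction; it just swaps in an
// environment that models the callee's frame on top of the caller's, and
// records the closure on the chunk so the deoptimizer can reconstruct the
// inlined frames later.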

LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (instr->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    argument_count_ -= argument_count;
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}

LInstruction* LChunkBuilder::DoIn(HIn* instr) {
  LOperand* key = UseRegisterAtStart(instr->key());
  LOperand* object = UseRegisterAtStart(instr->object());
  LIn* result = new(zone()) LIn(key, object);
  return MarkAsCall(DefineFixed(result, v0), instr);
}

LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
  LOperand* object = UseFixed(instr->enumerable(), a0);
  LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
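
// Note: CAN_DEOPTIMIZE_EAGERLY asks MarkAsCall to attach a deoptimization
// environment to the call itself, since for-in preparation can bail out
// during the call rather than only lazily after it returns.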

LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
  LOperand* map = UseRegister(instr->map());
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LForInCacheArray(map)));
}

LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* map = UseRegisterAtStart(instr->map());
  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
}

LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
  LOperand* object = UseRegister(instr->object());
  LOperand* index = UseRegister(instr->index());
  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
}

} }  // namespace v8::internal