30 #if defined(V8_TARGET_ARCH_X64)
39 #define DEFINE_COMPILE(type) \
40 void L##type::CompileToNative(LCodeGen* generator) { \
41 generator->Do##type(this); \
48 register_spills_[i] =
NULL;
51 double_register_spills_[i] =
NULL;
57 LOperand* spill_operand) {
58 ASSERT(spill_operand->IsStackSlot());
59 ASSERT(register_spills_[allocation_index] ==
NULL);
60 register_spills_[allocation_index] = spill_operand;
65 LOperand* spill_operand) {
66 ASSERT(spill_operand->IsDoubleStackSlot());
67 ASSERT(double_register_spills_[allocation_index] ==
NULL);
68 double_register_spills_[allocation_index] = spill_operand;
73 void LInstruction::VerifyCall() {
81 for (UseIterator it(
this); !it.Done(); it.Advance()) {
83 ASSERT(operand->HasFixedPolicy() ||
84 operand->IsUsedAtStart());
86 for (TempIterator it(
this); !it.Done(); it.Advance()) {
88 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
95 stream->Add(
"%s ", this->
Mnemonic());
116 if (i > 0) stream->Add(
" ");
131 stream->Add(
" Dead block replaced with B%d", rep->block_id());
137 for (
int i = 0; i < 4; i++) {
138 if (parallel_moves_[i] !=
NULL && !parallel_moves_[i]->
IsRedundant()) {
148 for (
int i = 0; i < 4; i++) {
150 if (parallel_moves_[i] !=
NULL) {
164 case Token::MOD:
return "mod-d";
177 case Token::MOD:
return "mod-t";
179 case Token::BIT_AND:
return "bit-and-t";
180 case Token::BIT_OR:
return "bit-or-t";
181 case Token::BIT_XOR:
return "bit-xor-t";
182 case Token::SHL:
return "sal-t";
183 case Token::SAR:
return "sar-t";
184 case Token::SHR:
return "shr-t";
222 stream->Add(
"if is_object(");
229 stream->Add(
"if is_string(");
236 stream->Add(
"if is_smi(");
243 stream->Add(
"if is_undetectable(");
250 stream->Add(
"if string_compare(");
258 stream->Add(
"if has_instance_type(");
265 stream->Add(
"if has_cached_array_index(");
272 stream->Add(
"if class_of_test(");
274 stream->Add(
", \"%o\") then B%d else B%d",
275 *hydrogen()->class_name(),
282 stream->Add(
"if typeof ");
284 stream->Add(
" == \"%s\" then B%d else B%d",
291 stream->Add(
"#%d / ",
arity());
296 stream->Add(
"/%s ", hydrogen()->OpName());
317 stream->Add(
" #%d / ",
arity());
322 stream->Add(
"[rcx] #%d / ",
arity());
327 SmartArrayPointer<char> name_string =
name()->ToCString();
328 stream->Add(
"%s #%d / ", *name_string,
arity());
333 SmartArrayPointer<char> name_string =
name()->ToCString();
334 stream->Add(
"%s #%d / ", *name_string,
arity());
339 stream->Add(
"#%d / ",
arity());
346 stream->Add(
" #%d / ",
arity());
353 stream->Add(
" length ");
356 stream->Add(
" index ");
362 return spill_slot_count_++;
380 HPhase phase(
"L_Mark empty blocks",
this);
381 for (
int i = 0; i <
graph()->
blocks()->length(); ++i) {
383 int first = block->first_instruction_index();
384 int last = block->last_instruction_index();
389 if (last_instr->IsGoto()) {
390 LGoto* goto_instr = LGoto::cast(last_instr);
391 if (label->IsRedundant() &&
392 !label->is_loop_header()) {
393 bool can_eliminate =
true;
394 for (
int i = first + 1; i < last && can_eliminate; ++i) {
398 if (!gap->IsRedundant()) {
399 can_eliminate =
false;
402 can_eliminate =
false;
407 label->set_replacement(
GetLabel(goto_instr->block_id()));
437 stream->Add(
"] <- ");
446 stream->Add(
"] <- ");
455 stream->Add(
"] <- ");
467 LInstructionGap* gap =
new(graph_->
zone()) LInstructionGap(block);
469 if (instr->IsControl()) {
470 instructions_.Add(gap,
zone());
471 index = instructions_.length();
472 instructions_.Add(instr,
zone());
474 index = instructions_.length();
475 instructions_.Add(instr,
zone());
476 instructions_.Add(gap,
zone());
478 if (instr->HasPointerMap()) {
479 pointer_maps_.Add(instr->pointer_map(),
zone());
480 instr->pointer_map()->set_lithium_position(index);
495 int result = index -
info()->scope()->num_parameters() - 1;
503 return (1 +
info()->scope()->num_parameters() - index) *
514 return instructions_[index]->IsGap();
519 while (!
IsGapAt(index)) index--;
536 LConstantOperand* operand)
const {
537 return graph_->
LookupValue(operand->index())->representation();
541 LChunk* LChunkBuilder::Build() {
543 chunk_ =
new(zone()) LChunk(info(), graph());
544 HPhase phase(
"L_Building chunk", chunk_);
546 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
547 for (
int i = 0; i < blocks->length(); i++) {
548 HBasicBlock* next =
NULL;
549 if (i < blocks->length() - 1) next = blocks->at(i + 1);
550 DoBasicBlock(blocks->at(i), next);
551 if (is_aborted())
return NULL;
558 void LChunkBuilder::Abort(
const char* format, ...) {
559 if (FLAG_trace_bailout) {
560 SmartArrayPointer<char>
name(
561 info()->shared_info()->DebugName()->
ToCString());
562 PrintF(
"Aborting LChunk building in @\"%s\": ", *
name);
564 va_start(arguments, format);
573 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
579 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
585 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
586 return Use(value, ToUnallocated(fixed_register));
590 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
591 return Use(value, ToUnallocated(reg));
595 LOperand* LChunkBuilder::UseRegister(HValue* value) {
600 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
607 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
612 LOperand* LChunkBuilder::Use(HValue* value) {
617 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
623 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
624 return value->IsConstant()
630 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
631 return value->IsConstant()
637 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
638 return value->IsConstant()
640 : UseRegister(value);
644 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
645 return value->IsConstant()
647 : UseRegisterAtStart(value);
651 LOperand* LChunkBuilder::UseAny(HValue* value) {
652 return value->IsConstant()
658 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
659 if (value->EmitAtUses()) {
661 VisitInstruction(instr);
663 operand->set_virtual_register(value->id());
668 template<
int I,
int T>
669 LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
670 LUnallocated* result) {
671 result->set_virtual_register(current_instruction_->id());
672 instr->set_result(result);
677 template<
int I,
int T>
678 LInstruction* LChunkBuilder::DefineAsRegister(
679 LTemplateInstruction<1, I, T>* instr) {
685 template<
int I,
int T>
686 LInstruction* LChunkBuilder::DefineAsSpilled(
687 LTemplateInstruction<1, I, T>* instr,
694 template<
int I,
int T>
695 LInstruction* LChunkBuilder::DefineSameAsFirst(
696 LTemplateInstruction<1, I, T>* instr) {
702 template<
int I,
int T>
703 LInstruction* LChunkBuilder::DefineFixed(LTemplateInstruction<1, I, T>* instr,
705 return Define(instr, ToUnallocated(reg));
709 template<
int I,
int T>
710 LInstruction* LChunkBuilder::DefineFixedDouble(
711 LTemplateInstruction<1, I, T>* instr,
713 return Define(instr, ToUnallocated(reg));
717 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
718 HEnvironment* hydrogen_env = current_block_->last_environment();
719 int argument_index_accumulator = 0;
720 instr->set_environment(CreateEnvironment(hydrogen_env,
721 &argument_index_accumulator));
726 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
727 HInstruction* hinstr,
728 CanDeoptimize can_deoptimize) {
733 instr = AssignPointerMap(instr);
735 if (hinstr->HasObservableSideEffects()) {
736 ASSERT(hinstr->next()->IsSimulate());
738 ASSERT(instruction_pending_deoptimization_environment_ ==
NULL);
740 instruction_pending_deoptimization_environment_ = instr;
741 pending_deoptimization_ast_id_ = sim->ast_id();
748 bool needs_environment =
749 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
750 !hinstr->HasObservableSideEffects();
751 if (needs_environment && !instr->HasEnvironment()) {
752 instr = AssignEnvironment(instr);
759 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
760 ASSERT(!instr->HasPointerMap());
761 instr->set_pointer_map(
new(zone()) LPointerMap(position_, zone()));
766 LUnallocated* LChunkBuilder::TempRegister() {
767 LUnallocated* operand =
769 operand->set_virtual_register(allocator_->GetVirtualRegister());
770 if (!allocator_->AllocationOk()) Abort(
"Not enough virtual registers.");
775 LOperand* LChunkBuilder::FixedTemp(Register reg) {
776 LUnallocated* operand = ToUnallocated(reg);
777 ASSERT(operand->HasFixedPolicy());
782 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
783 LUnallocated* operand = ToUnallocated(reg);
784 ASSERT(operand->HasFixedPolicy());
789 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
790 return new(zone()) LLabel(instr->block());
794 LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
795 return AssignEnvironment(
new(zone()) LDeoptimize);
799 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
800 return AssignEnvironment(
new(zone()) LDeoptimize);
805 HBitwiseBinaryOperation* instr) {
806 if (instr->representation().IsTagged()) {
807 ASSERT(instr->left()->representation().IsTagged());
808 ASSERT(instr->right()->representation().IsTagged());
810 LOperand* left = UseFixed(instr->left(),
rdx);
811 LOperand* right = UseFixed(instr->right(),
rax);
812 LArithmeticT* result =
new(zone()) LArithmeticT(op, left, right);
813 return MarkAsCall(DefineFixed(result,
rax), instr);
816 ASSERT(instr->representation().IsInteger32());
817 ASSERT(instr->left()->representation().IsInteger32());
818 ASSERT(instr->right()->representation().IsInteger32());
819 LOperand* left = UseRegisterAtStart(instr->left());
821 HValue* right_value = instr->right();
822 LOperand* right =
NULL;
823 int constant_value = 0;
824 if (right_value->IsConstant()) {
826 right = chunk_->DefineConstantOperand(constant);
827 constant_value = constant->Integer32Value() & 0x1f;
829 right = UseFixed(right_value,
rcx);
834 bool may_deopt = (op == Token::SHR && constant_value == 0);
835 bool does_deopt =
false;
837 for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
845 LInstruction* result =
846 DefineSameAsFirst(
new(zone()) LShiftI(op, left, right, does_deopt));
847 return does_deopt ? AssignEnvironment(result) : result;
// Lower a double-typed binary arithmetic operation. Both inputs are
// used in registers at the start of the instruction and the result
// reuses the first input's register (DefineSameAsFirst).
851 LInstruction* LChunkBuilder::DoArithmeticD(
Token::Value op,
852 HArithmeticBinaryOperation* instr) {
// Callers guarantee a pure double operation on both operands.
853 ASSERT(instr->representation().IsDouble());
854 ASSERT(instr->left()->representation().IsDouble());
855 ASSERT(instr->right()->representation().IsDouble());
857 LOperand* left = UseRegisterAtStart(instr->left());
858 LOperand* right = UseRegisterAtStart(instr->right());
859 LArithmeticD* result =
new(zone()) LArithmeticD(op, left, right);
// No environment/call marking: double arithmetic here cannot deopt.
860 return DefineSameAsFirst(result);
864 LInstruction* LChunkBuilder::DoArithmeticT(
Token::Value op,
865 HArithmeticBinaryOperation* instr) {
871 HValue* left = instr->left();
872 HValue* right = instr->right();
873 ASSERT(left->representation().IsTagged());
874 ASSERT(right->representation().IsTagged());
875 LOperand* left_operand = UseFixed(left,
rdx);
876 LOperand* right_operand = UseFixed(right,
rax);
877 LArithmeticT* result =
878 new(zone()) LArithmeticT(op, left_operand, right_operand);
879 return MarkAsCall(DefineFixed(result,
rax), instr);
883 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
885 current_block_ = block;
886 next_block_ = next_block;
887 if (block->IsStartBlock()) {
888 block->UpdateEnvironment(graph_->start_environment());
890 }
else if (block->predecessors()->length() == 1) {
893 ASSERT(block->phis()->length() == 0);
894 HBasicBlock* pred = block->predecessors()->at(0);
895 HEnvironment* last_environment = pred->last_environment();
898 if (pred->end()->SecondSuccessor() ==
NULL) {
899 ASSERT(pred->end()->FirstSuccessor() == block);
901 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
902 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
903 last_environment = last_environment->Copy();
906 block->UpdateEnvironment(last_environment);
907 ASSERT(pred->argument_count() >= 0);
908 argument_count_ = pred->argument_count();
911 HBasicBlock* pred = block->predecessors()->at(0);
913 HEnvironment* last_environment = pred->last_environment();
914 for (
int i = 0; i < block->phis()->length(); ++i) {
915 HPhi* phi = block->phis()->at(i);
916 last_environment->SetValueAt(phi->merged_index(), phi);
918 for (
int i = 0; i < block->deleted_phis()->length(); ++i) {
919 last_environment->SetValueAt(block->deleted_phis()->at(i),
920 graph_->GetConstantUndefined());
922 block->UpdateEnvironment(last_environment);
924 argument_count_ = pred->argument_count();
926 HInstruction* current = block->first();
927 int start = chunk_->instructions()->length();
928 while (current !=
NULL && !is_aborted()) {
930 if (!current->EmitAtUses()) {
931 VisitInstruction(current);
933 current = current->next();
935 int end = chunk_->instructions()->length() - 1;
937 block->set_first_instruction_index(start);
938 block->set_last_instruction_index(end);
940 block->set_argument_count(argument_count_);
942 current_block_ =
NULL;
946 void LChunkBuilder::VisitInstruction(HInstruction* current) {
947 HInstruction* old_current = current_instruction_;
948 current_instruction_ = current;
949 if (current->has_position()) position_ = current->position();
950 LInstruction* instr = current->CompileToLithium(
this);
953 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
954 instr = AssignPointerMap(instr);
956 if (FLAG_stress_environments && !instr->HasEnvironment()) {
957 instr = AssignEnvironment(instr);
959 instr->set_hydrogen_value(current);
960 chunk_->AddInstruction(instr, current_block_);
962 current_instruction_ = old_current;
966 LEnvironment* LChunkBuilder::CreateEnvironment(
967 HEnvironment* hydrogen_env,
968 int* argument_index_accumulator) {
969 if (hydrogen_env ==
NULL)
return NULL;
971 LEnvironment* outer =
972 CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
973 int ast_id = hydrogen_env->ast_id();
976 int value_count = hydrogen_env->length();
977 LEnvironment* result =
new(zone()) LEnvironment(
978 hydrogen_env->closure(),
979 hydrogen_env->frame_type(),
981 hydrogen_env->parameter_count(),
986 int argument_index = *argument_index_accumulator;
987 for (
int i = 0; i < value_count; ++i) {
988 if (hydrogen_env->is_special_index(i))
continue;
990 HValue* value = hydrogen_env->values()->at(i);
992 if (value->IsArgumentsObject()) {
994 }
else if (value->IsPushArgument()) {
995 op =
new(zone()) LArgument(argument_index++);
999 result->AddValue(op, value->representation());
1003 *argument_index_accumulator = argument_index;
1010 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1011 return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1015 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1016 HValue* value = instr->value();
1017 if (value->EmitAtUses()) {
1018 ASSERT(value->IsConstant());
1019 ASSERT(!value->representation().IsDouble());
1021 ? instr->FirstSuccessor()
1022 : instr->SecondSuccessor();
1023 return new(zone()) LGoto(successor->block_id());
1026 LBranch* result =
new(zone()) LBranch(UseRegister(value));
1030 HType
type = value->type();
1031 if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
1032 return AssignEnvironment(result);
1038 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1039 ASSERT(instr->value()->representation().IsTagged());
1040 LOperand* value = UseRegisterAtStart(instr->value());
1041 return new(zone()) LCmpMapAndBranch(value);
1045 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1046 return DefineAsRegister(
new(zone()) LArgumentsLength(Use(length->value())));
1050 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1051 return DefineAsRegister(
new(zone()) LArgumentsElements);
1055 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1056 LOperand* left = UseFixed(instr->left(),
rax);
1057 LOperand* right = UseFixed(instr->right(),
rdx);
1058 LInstanceOf* result =
new(zone()) LInstanceOf(left, right);
1059 return MarkAsCall(DefineFixed(result,
rax), instr);
1063 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1064 HInstanceOfKnownGlobal* instr) {
1065 LInstanceOfKnownGlobal* result =
1066 new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(),
rax),
1068 return MarkAsCall(DefineFixed(result,
rax), instr);
1072 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1073 LOperand* receiver = UseRegister(instr->receiver());
1074 LOperand*
function = UseRegisterAtStart(instr->function());
1075 LWrapReceiver* result =
new(zone()) LWrapReceiver(receiver,
function);
1076 return AssignEnvironment(DefineSameAsFirst(result));
1080 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1081 LOperand*
function = UseFixed(instr->function(),
rdi);
1082 LOperand* receiver = UseFixed(instr->receiver(),
rax);
1083 LOperand* length = UseFixed(instr->length(),
rbx);
1084 LOperand* elements = UseFixed(instr->elements(),
rcx);
1085 LApplyArguments* result =
new(zone()) LApplyArguments(
function,
1089 return MarkAsCall(DefineFixed(result,
rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1093 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1095 LOperand* argument = UseOrConstant(instr->argument());
1096 return new(zone()) LPushArgument(argument);
1100 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1101 return instr->HasNoUses()
1103 : DefineAsRegister(
new(zone()) LThisFunction);
1107 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1108 return instr->HasNoUses() ?
NULL : DefineAsRegister(
new(zone()) LContext);
1112 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1113 LOperand* context = UseRegisterAtStart(instr->value());
1114 return DefineAsRegister(
new(zone()) LOuterContext(context));
1118 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1119 return MarkAsCall(
new(zone()) LDeclareGlobals, instr);
1123 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1124 return DefineAsRegister(
new(zone()) LGlobalObject);
1128 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1129 LOperand* global_object = UseRegisterAtStart(instr->value());
1130 return DefineAsRegister(
new(zone()) LGlobalReceiver(global_object));
1134 LInstruction* LChunkBuilder::DoCallConstantFunction(
1135 HCallConstantFunction* instr) {
1136 argument_count_ -= instr->argument_count();
1137 return MarkAsCall(DefineFixed(
new(zone()) LCallConstantFunction,
rax), instr);
1141 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1142 LOperand*
function = UseFixed(instr->function(),
rdi);
1143 argument_count_ -= instr->argument_count();
1144 LInvokeFunction* result =
new(zone()) LInvokeFunction(
function);
1145 return MarkAsCall(DefineFixed(result,
rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1149 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1151 if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
1152 LOperand* input = UseFixedDouble(instr->value(),
xmm1);
1153 LUnaryMathOperation* result =
new(zone()) LUnaryMathOperation(input);
1154 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1156 LOperand* input = UseRegisterAtStart(instr->value());
1157 LUnaryMathOperation* result =
new(zone()) LUnaryMathOperation(input);
1160 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1162 return AssignEnvironment(DefineAsRegister(result));
1164 return AssignEnvironment(DefineAsRegister(result));
1166 return DefineSameAsFirst(result);
1168 return DefineSameAsFirst(result);
1177 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1178 ASSERT(instr->key()->representation().IsTagged());
1179 LOperand* key = UseFixed(instr->key(),
rcx);
1180 argument_count_ -= instr->argument_count();
1181 LCallKeyed* result =
new(zone()) LCallKeyed(key);
1182 return MarkAsCall(DefineFixed(result,
rax), instr);
1186 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1187 argument_count_ -= instr->argument_count();
1188 return MarkAsCall(DefineFixed(
new(zone()) LCallNamed,
rax), instr);
1192 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1193 argument_count_ -= instr->argument_count();
1194 return MarkAsCall(DefineFixed(
new(zone()) LCallGlobal,
rax), instr);
1198 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1199 argument_count_ -= instr->argument_count();
1200 return MarkAsCall(DefineFixed(
new(zone()) LCallKnownGlobal,
rax), instr);
1204 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1205 LOperand* constructor = UseFixed(instr->constructor(),
rdi);
1206 argument_count_ -= instr->argument_count();
1207 LCallNew* result =
new(zone()) LCallNew(constructor);
1208 return MarkAsCall(DefineFixed(result,
rax), instr);
1212 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1213 LOperand*
function = UseFixed(instr->function(),
rdi);
1214 argument_count_ -= instr->argument_count();
1215 LCallFunction* result =
new(zone()) LCallFunction(
function);
1216 return MarkAsCall(DefineFixed(result,
rax), instr);
1220 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1221 argument_count_ -= instr->argument_count();
1222 return MarkAsCall(DefineFixed(
new(zone()) LCallRuntime,
rax), instr);
1226 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1227 return DoShift(Token::SHR, instr);
1231 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1232 return DoShift(Token::SAR, instr);
1236 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1237 return DoShift(Token::SHL, instr);
1241 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1242 if (instr->representation().IsInteger32()) {
1243 ASSERT(instr->left()->representation().IsInteger32());
1244 ASSERT(instr->right()->representation().IsInteger32());
1246 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1247 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1248 return DefineSameAsFirst(
new(zone()) LBitI(left, right));
1250 ASSERT(instr->representation().IsTagged());
1251 ASSERT(instr->left()->representation().IsTagged());
1252 ASSERT(instr->right()->representation().IsTagged());
1254 LOperand* left = UseFixed(instr->left(),
rdx);
1255 LOperand* right = UseFixed(instr->right(),
rax);
1256 LArithmeticT* result =
new(zone()) LArithmeticT(instr->op(), left, right);
1257 return MarkAsCall(DefineFixed(result,
rax), instr);
1262 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1263 ASSERT(instr->value()->representation().IsInteger32());
1264 ASSERT(instr->representation().IsInteger32());
1265 if (instr->HasNoUses())
return NULL;
1266 LOperand* input = UseRegisterAtStart(instr->value());
1267 LBitNotI* result =
new(zone()) LBitNotI(input);
1268 return DefineSameAsFirst(result);
1272 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1273 if (instr->representation().IsDouble()) {
1275 }
else if (instr->representation().IsInteger32()) {
1278 LOperand* temp = FixedTemp(
rdx);
1279 LOperand* dividend = UseFixed(instr->left(),
rax);
1280 LOperand* divisor = UseRegister(instr->right());
1281 LDivI* result =
new(zone()) LDivI(dividend, divisor, temp);
1282 return AssignEnvironment(DefineFixed(result,
rax));
1284 ASSERT(instr->representation().IsTagged());
1290 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1296 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1297 if (instr->representation().IsInteger32()) {
1298 ASSERT(instr->left()->representation().IsInteger32());
1299 ASSERT(instr->right()->representation().IsInteger32());
1301 LInstruction* result;
1302 if (instr->HasPowerOf2Divisor()) {
1304 LOperand* value = UseRegisterAtStart(instr->left());
1306 new(zone()) LModI(value, UseOrConstant(instr->right()),
NULL);
1307 result = DefineSameAsFirst(mod);
1311 LOperand* temp = FixedTemp(
rdx);
1312 LOperand* value = UseFixed(instr->left(),
rax);
1313 LOperand* divisor = UseRegister(instr->right());
1314 LModI* mod =
new(zone()) LModI(value, divisor, temp);
1315 result = DefineFixed(mod,
rdx);
1320 ? AssignEnvironment(result)
1322 }
else if (instr->representation().IsTagged()) {
1323 return DoArithmeticT(Token::MOD, instr);
1325 ASSERT(instr->representation().IsDouble());
1329 LOperand* left = UseFixedDouble(instr->left(),
xmm2);
1330 LOperand* right = UseFixedDouble(instr->right(),
xmm1);
1331 LArithmeticD* result =
new(zone()) LArithmeticD(Token::MOD, left, right);
1332 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1337 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1338 if (instr->representation().IsInteger32()) {
1339 ASSERT(instr->left()->representation().IsInteger32());
1340 ASSERT(instr->right()->representation().IsInteger32());
1341 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1342 LOperand* right = UseOrConstant(instr->MostConstantOperand());
1343 LMulI* mul =
new(zone()) LMulI(left, right);
1346 AssignEnvironment(mul);
1348 return DefineSameAsFirst(mul);
1349 }
else if (instr->representation().IsDouble()) {
1352 ASSERT(instr->representation().IsTagged());
1358 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1359 if (instr->representation().IsInteger32()) {
1360 ASSERT(instr->left()->representation().IsInteger32());
1361 ASSERT(instr->right()->representation().IsInteger32());
1362 LOperand* left = UseRegisterAtStart(instr->left());
1363 LOperand* right = UseOrConstantAtStart(instr->right());
1364 LSubI* sub =
new(zone()) LSubI(left, right);
1365 LInstruction* result = DefineSameAsFirst(sub);
1367 result = AssignEnvironment(result);
1370 }
else if (instr->representation().IsDouble()) {
1373 ASSERT(instr->representation().IsTagged());
1379 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1380 if (instr->representation().IsInteger32()) {
1381 ASSERT(instr->left()->representation().IsInteger32());
1382 ASSERT(instr->right()->representation().IsInteger32());
1383 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1384 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1385 LAddI* add =
new(zone()) LAddI(left, right);
1386 LInstruction* result = DefineSameAsFirst(add);
1388 result = AssignEnvironment(result);
1391 }
else if (instr->representation().IsDouble()) {
1394 ASSERT(instr->representation().IsTagged());
1401 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1402 ASSERT(instr->representation().IsDouble());
1405 Representation exponent_type = instr->right()->representation();
1406 ASSERT(instr->left()->representation().IsDouble());
1407 LOperand* left = UseFixedDouble(instr->left(),
xmm2);
1408 LOperand* right = exponent_type.IsDouble() ?
1409 UseFixedDouble(instr->right(),
xmm1) :
1411 UseFixed(instr->right(),
rdx);
1413 UseFixed(instr->right(),
rdi);
1415 LPower* result =
new(zone()) LPower(left, right);
1416 return MarkAsCall(DefineFixedDouble(result,
xmm3), instr,
1417 CAN_DEOPTIMIZE_EAGERLY);
1421 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
1422 ASSERT(instr->representation().IsDouble());
1423 ASSERT(instr->global_object()->representation().IsTagged());
1425 LOperand* global_object = UseFixed(instr->global_object(),
rcx);
1427 LOperand* global_object = UseFixed(instr->global_object(),
rdi);
1429 LRandom* result =
new(zone()) LRandom(global_object);
1430 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1434 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1435 ASSERT(instr->left()->representation().IsTagged());
1436 ASSERT(instr->right()->representation().IsTagged());
1437 LOperand* left = UseFixed(instr->left(),
rdx);
1438 LOperand* right = UseFixed(instr->right(),
rax);
1439 LCmpT* result =
new(zone()) LCmpT(left, right);
1440 return MarkAsCall(DefineFixed(result,
rax), instr);
1444 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1445 HCompareIDAndBranch* instr) {
1446 Representation r = instr->GetInputRepresentation();
1447 if (r.IsInteger32()) {
1448 ASSERT(instr->left()->representation().IsInteger32());
1449 ASSERT(instr->right()->representation().IsInteger32());
1450 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1451 LOperand* right = UseOrConstantAtStart(instr->right());
1452 return new(zone()) LCmpIDAndBranch(left, right);
1455 ASSERT(instr->left()->representation().IsDouble());
1456 ASSERT(instr->right()->representation().IsDouble());
1459 if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1460 left = UseRegisterOrConstantAtStart(instr->left());
1461 right = UseRegisterOrConstantAtStart(instr->right());
1463 left = UseRegisterAtStart(instr->left());
1464 right = UseRegisterAtStart(instr->right());
1466 return new(zone()) LCmpIDAndBranch(left, right);
1471 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1472 HCompareObjectEqAndBranch* instr) {
1473 LOperand* left = UseRegisterAtStart(instr->left());
1474 LOperand* right = UseRegisterAtStart(instr->right());
1475 return new(zone()) LCmpObjectEqAndBranch(left, right);
1479 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1480 HCompareConstantEqAndBranch* instr) {
1481 LOperand* value = UseRegisterAtStart(instr->value());
1482 return new(zone()) LCmpConstantEqAndBranch(value);
1486 LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
1487 ASSERT(instr->value()->representation().IsTagged());
1489 return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
1493 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1494 ASSERT(instr->value()->representation().IsTagged());
1495 return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
1499 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1500 ASSERT(instr->value()->representation().IsTagged());
1501 LOperand* value = UseRegisterAtStart(instr->value());
1502 LOperand* temp = TempRegister();
1503 return new(zone()) LIsStringAndBranch(value, temp);
1507 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1508 ASSERT(instr->value()->representation().IsTagged());
1509 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1513 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1514 HIsUndetectableAndBranch* instr) {
1515 ASSERT(instr->value()->representation().IsTagged());
1516 LOperand* value = UseRegisterAtStart(instr->value());
1517 LOperand* temp = TempRegister();
1518 return new(zone()) LIsUndetectableAndBranch(value, temp);
1522 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1523 HStringCompareAndBranch* instr) {
1525 ASSERT(instr->left()->representation().IsTagged());
1526 ASSERT(instr->right()->representation().IsTagged());
1527 LOperand* left = UseFixed(instr->left(),
rdx);
1528 LOperand* right = UseFixed(instr->right(),
rax);
1529 LStringCompareAndBranch* result =
1530 new(zone()) LStringCompareAndBranch(left, right);
1532 return MarkAsCall(result, instr);
1536 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1537 HHasInstanceTypeAndBranch* instr) {
1538 ASSERT(instr->value()->representation().IsTagged());
1539 LOperand* value = UseRegisterAtStart(instr->value());
1540 return new(zone()) LHasInstanceTypeAndBranch(value);
1544 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1545 HGetCachedArrayIndex* instr) {
1546 ASSERT(instr->value()->representation().IsTagged());
1547 LOperand* value = UseRegisterAtStart(instr->value());
1549 return DefineAsRegister(
new(zone()) LGetCachedArrayIndex(value));
1553 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1554 HHasCachedArrayIndexAndBranch* instr) {
1555 ASSERT(instr->value()->representation().IsTagged());
1556 LOperand* value = UseRegisterAtStart(instr->value());
1557 return new(zone()) LHasCachedArrayIndexAndBranch(value);
1561 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1562 HClassOfTestAndBranch* instr) {
1563 LOperand* value = UseRegister(instr->value());
1564 return new(zone()) LClassOfTestAndBranch(value,
1570 LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
1571 LOperand* array = UseRegisterAtStart(instr->value());
1572 return DefineAsRegister(
new(zone()) LJSArrayLength(array));
1576 LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
1577 HFixedArrayBaseLength* instr) {
1578 LOperand* array = UseRegisterAtStart(instr->value());
1579 return DefineAsRegister(
new(zone()) LFixedArrayBaseLength(array));
1583 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1584 LOperand*
object = UseRegisterAtStart(instr->value());
1585 return DefineAsRegister(
new(zone()) LElementsKind(
object));
1589 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1590 LOperand*
object = UseRegister(instr->value());
1591 LValueOf* result =
new(zone()) LValueOf(
object);
1592 return DefineSameAsFirst(result);
1596 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1597 LOperand*
object = UseFixed(instr->value(),
rax);
1598 LDateField* result =
new(zone()) LDateField(
object, instr->index());
1599 return MarkAsCall(DefineFixed(result,
rax), instr);
1603 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1604 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1605 LOperand* length = Use(instr->length());
1606 return AssignEnvironment(
new(zone()) LBoundsCheck(value, length));
1610 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1617 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1618 LOperand* value = UseFixed(instr->value(),
rax);
1619 return MarkAsCall(
new(zone()) LThrow(value), instr);
1623 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1628 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1636 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1637 Representation from = instr->from();
1638 Representation to = instr->to();
1639 if (from.IsTagged()) {
1640 if (to.IsDouble()) {
1641 LOperand* value = UseRegister(instr->value());
1642 LNumberUntagD* res =
new(zone()) LNumberUntagD(value);
1643 return AssignEnvironment(DefineAsRegister(res));
1645 ASSERT(to.IsInteger32());
1646 LOperand* value = UseRegister(instr->value());
1647 if (instr->value()->type().IsSmi()) {
1648 return DefineSameAsFirst(
new(zone()) LSmiUntag(value,
false));
1650 bool truncating = instr->CanTruncateToInt32();
1651 LOperand* xmm_temp = truncating ?
NULL : FixedTemp(
xmm1);
1652 LTaggedToI* res =
new(zone()) LTaggedToI(value, xmm_temp);
1653 return AssignEnvironment(DefineSameAsFirst(res));
1656 }
else if (from.IsDouble()) {
1657 if (to.IsTagged()) {
1658 LOperand* value = UseRegister(instr->value());
1659 LOperand* temp = TempRegister();
1662 LUnallocated* result_temp = TempRegister();
1663 LNumberTagD* result =
new(zone()) LNumberTagD(value, temp);
1664 return AssignPointerMap(Define(result, result_temp));
1666 ASSERT(to.IsInteger32());
1667 LOperand* value = UseRegister(instr->value());
1668 return AssignEnvironment(DefineAsRegister(
new(zone()) LDoubleToI(value)));
1670 }
else if (from.IsInteger32()) {
1671 if (to.IsTagged()) {
1672 HValue* val = instr->value();
1673 LOperand* value = UseRegister(val);
1674 if (val->HasRange() && val->range()->IsInSmiRange()) {
1675 return DefineSameAsFirst(
new(zone()) LSmiTag(value));
1677 LNumberTagI* result =
new(zone()) LNumberTagI(value);
1678 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1682 LOperand* value = Use(instr->value());
1683 return DefineAsRegister(
new(zone()) LInteger32ToDouble(value));
1691 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1692 LOperand* value = UseRegisterAtStart(instr->value());
1693 return AssignEnvironment(
new(zone()) LCheckNonSmi(value));
1697 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1698 LOperand* value = UseRegisterAtStart(instr->value());
1699 LCheckInstanceType* result =
new(zone()) LCheckInstanceType(value);
1700 return AssignEnvironment(result);
1704 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1705 LOperand* temp = TempRegister();
1706 LCheckPrototypeMaps* result =
new(zone()) LCheckPrototypeMaps(temp);
1707 return AssignEnvironment(result);
1711 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1712 LOperand* value = UseRegisterAtStart(instr->value());
1713 return AssignEnvironment(
new(zone()) LCheckSmi(value));
1717 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1718 LOperand* value = UseRegisterAtStart(instr->value());
1719 return AssignEnvironment(
new(zone()) LCheckFunction(value));
1723 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1724 LOperand* value = UseRegisterAtStart(instr->value());
1725 LCheckMaps* result =
new(zone()) LCheckMaps(value);
1726 return AssignEnvironment(result);
1730 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1731 HValue* value = instr->value();
1732 Representation input_rep = value->representation();
1733 LOperand* reg = UseRegister(value);
1734 if (input_rep.IsDouble()) {
1735 return DefineAsRegister(
new(zone()) LClampDToUint8(reg,
1737 }
else if (input_rep.IsInteger32()) {
1738 return DefineSameAsFirst(
new(zone()) LClampIToUint8(reg));
1740 ASSERT(input_rep.IsTagged());
1743 LClampTToUint8* result =
new(zone()) LClampTToUint8(reg,
1746 return AssignEnvironment(DefineSameAsFirst(result));
1751 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1752 return new(zone()) LReturn(UseFixed(instr->value(),
rax));
1756 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1757 Representation r = instr->representation();
1758 if (r.IsInteger32()) {
1759 return DefineAsRegister(
new(zone()) LConstantI);
1760 }
else if (r.IsDouble()) {
1761 LOperand* temp = TempRegister();
1762 return DefineAsRegister(
new(zone()) LConstantD(temp));
1763 }
else if (r.IsTagged()) {
1764 return DefineAsRegister(
new(zone()) LConstantT);
1772 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1773 LLoadGlobalCell* result =
new(zone()) LLoadGlobalCell;
1774 return instr->RequiresHoleCheck()
1775 ? AssignEnvironment(DefineAsRegister(result))
1776 : DefineAsRegister(result);
1780 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1781 LOperand* global_object = UseFixed(instr->global_object(),
rax);
1782 LLoadGlobalGeneric* result =
new(zone()) LLoadGlobalGeneric(global_object);
1783 return MarkAsCall(DefineFixed(result,
rax), instr);
1787 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1788 LOperand* value = UseRegister(instr->value());
1791 return instr->RequiresHoleCheck()
1792 ? AssignEnvironment(
new(zone()) LStoreGlobalCell(value, TempRegister()))
1793 : new(zone()) LStoreGlobalCell(value,
NULL);
1797 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1798 LOperand* global_object = UseFixed(instr->global_object(),
rdx);
1799 LOperand* value = UseFixed(instr->value(),
rax);
1800 LStoreGlobalGeneric* result =
new(zone()) LStoreGlobalGeneric(global_object,
1802 return MarkAsCall(result, instr);
1806 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1807 LOperand* context = UseRegisterAtStart(instr->value());
1808 LInstruction* result =
1809 DefineAsRegister(
new(zone()) LLoadContextSlot(context));
1810 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1814 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
1818 if (instr->NeedsWriteBarrier()) {
1819 context = UseTempRegister(instr->context());
1820 value = UseTempRegister(instr->value());
1821 temp = TempRegister();
1823 context = UseRegister(instr->context());
1824 value = UseRegister(instr->value());
1827 LInstruction* result =
new(zone()) LStoreContextSlot(context, value, temp);
1828 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1832 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1833 ASSERT(instr->representation().IsTagged());
1834 LOperand* obj = UseRegisterAtStart(instr->object());
1835 return DefineAsRegister(
new(zone()) LLoadNamedField(obj));
1839 LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
1840 HLoadNamedFieldPolymorphic* instr) {
1841 ASSERT(instr->representation().IsTagged());
1842 if (instr->need_generic()) {
1843 LOperand* obj = UseFixed(instr->object(),
rax);
1844 LLoadNamedFieldPolymorphic* result =
1845 new(zone()) LLoadNamedFieldPolymorphic(obj);
1846 return MarkAsCall(DefineFixed(result,
rax), instr);
1848 LOperand* obj = UseRegisterAtStart(instr->object());
1849 LLoadNamedFieldPolymorphic* result =
1850 new(zone()) LLoadNamedFieldPolymorphic(obj);
1851 return AssignEnvironment(DefineAsRegister(result));
1856 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1857 LOperand*
object = UseFixed(instr->object(),
rax);
1858 LLoadNamedGeneric* result =
new(zone()) LLoadNamedGeneric(
object);
1859 return MarkAsCall(DefineFixed(result,
rax), instr);
1863 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1864 HLoadFunctionPrototype* instr) {
1865 return AssignEnvironment(DefineAsRegister(
1866 new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
1870 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1871 LOperand* input = UseRegisterAtStart(instr->value());
1872 return DefineAsRegister(
new(zone()) LLoadElements(input));
1876 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1877 HLoadExternalArrayPointer* instr) {
1878 LOperand* input = UseRegisterAtStart(instr->value());
1879 return DefineAsRegister(
new(zone()) LLoadExternalArrayPointer(input));
1883 LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
1884 HLoadKeyedFastElement* instr) {
1885 ASSERT(instr->representation().IsTagged());
1886 ASSERT(instr->key()->representation().IsInteger32());
1887 LOperand* obj = UseRegisterAtStart(instr->object());
1888 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1889 LLoadKeyedFastElement* result =
new(zone()) LLoadKeyedFastElement(obj, key);
1890 if (instr->RequiresHoleCheck()) AssignEnvironment(result);
1891 return DefineAsRegister(result);
1895 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1896 HLoadKeyedFastDoubleElement* instr) {
1897 ASSERT(instr->representation().IsDouble());
1898 ASSERT(instr->key()->representation().IsInteger32());
1899 LOperand* elements = UseRegisterAtStart(instr->elements());
1900 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1901 LLoadKeyedFastDoubleElement* result =
1902 new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1903 return AssignEnvironment(DefineAsRegister(result));
1907 LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
1908 HLoadKeyedSpecializedArrayElement* instr) {
1911 (instr->representation().IsInteger32() &&
1914 (instr->representation().IsDouble() &&
1917 ASSERT(instr->key()->representation().IsInteger32());
1918 LOperand* external_pointer = UseRegister(instr->external_pointer());
1919 LOperand* key = UseRegisterOrConstant(instr->key());
1920 LLoadKeyedSpecializedArrayElement* result =
1921 new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
1922 LInstruction* load_instr = DefineAsRegister(result);
1926 AssignEnvironment(load_instr) : load_instr;
1930 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1931 LOperand*
object = UseFixed(instr->object(),
rdx);
1932 LOperand* key = UseFixed(instr->key(),
rax);
1934 LLoadKeyedGeneric* result =
new(zone()) LLoadKeyedGeneric(
object, key);
1935 return MarkAsCall(DefineFixed(result,
rax), instr);
1939 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
1940 HStoreKeyedFastElement* instr) {
1941 bool needs_write_barrier = instr->NeedsWriteBarrier();
1942 ASSERT(instr->value()->representation().IsTagged());
1943 ASSERT(instr->object()->representation().IsTagged());
1944 ASSERT(instr->key()->representation().IsInteger32());
1946 LOperand* obj = UseTempRegister(instr->object());
1947 LOperand* val = needs_write_barrier
1948 ? UseTempRegister(instr->value())
1949 : UseRegisterAtStart(instr->value());
1950 LOperand* key = needs_write_barrier
1951 ? UseTempRegister(instr->key())
1952 : UseRegisterOrConstantAtStart(instr->key());
1953 return new(zone()) LStoreKeyedFastElement(obj, key, val);
1957 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
1958 HStoreKeyedFastDoubleElement* instr) {
1959 ASSERT(instr->value()->representation().IsDouble());
1960 ASSERT(instr->elements()->representation().IsTagged());
1961 ASSERT(instr->key()->representation().IsInteger32());
1963 LOperand* elements = UseRegisterAtStart(instr->elements());
1964 LOperand* val = UseTempRegister(instr->value());
1965 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1967 return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
1971 LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
1972 HStoreKeyedSpecializedArrayElement* instr) {
1975 (instr->value()->representation().IsInteger32() &&
1978 (instr->value()->representation().IsDouble() &&
1981 ASSERT(instr->external_pointer()->representation().IsExternal());
1982 ASSERT(instr->key()->representation().IsInteger32());
1984 LOperand* external_pointer = UseRegister(instr->external_pointer());
1985 bool val_is_temp_register =
1988 LOperand* val = val_is_temp_register
1989 ? UseTempRegister(instr->value())
1990 : UseRegister(instr->value());
1991 LOperand* key = UseRegisterOrConstant(instr->key());
1993 return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
1999 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2000 LOperand*
object = UseFixed(instr->object(),
rdx);
2001 LOperand* key = UseFixed(instr->key(),
rcx);
2002 LOperand* value = UseFixed(instr->value(),
rax);
2004 ASSERT(instr->object()->representation().IsTagged());
2005 ASSERT(instr->key()->representation().IsTagged());
2006 ASSERT(instr->value()->representation().IsTagged());
2008 LStoreKeyedGeneric* result =
2009 new(zone()) LStoreKeyedGeneric(
object, key, value);
2010 return MarkAsCall(result, instr);
2014 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2015 HTransitionElementsKind* instr) {
2016 ElementsKind from_kind = instr->original_map()->elements_kind();
2017 ElementsKind to_kind = instr->transitioned_map()->elements_kind();
2019 LOperand*
object = UseRegister(instr->object());
2020 LOperand* new_map_reg = TempRegister();
2021 LOperand* temp_reg = TempRegister();
2022 LTransitionElementsKind* result =
2023 new(zone()) LTransitionElementsKind(
object, new_map_reg, temp_reg);
2024 return DefineSameAsFirst(result);
2026 LOperand*
object = UseFixed(instr->object(),
rax);
2027 LOperand* fixed_object_reg = FixedTemp(
rdx);
2028 LOperand* new_map_reg = FixedTemp(
rbx);
2029 LTransitionElementsKind* result =
2030 new(zone()) LTransitionElementsKind(
object,
2033 return MarkAsCall(DefineFixed(result,
rax), instr);
2038 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2039 bool needs_write_barrier = instr->NeedsWriteBarrier();
2040 bool needs_write_barrier_for_map = !instr->transition().is_null() &&
2041 instr->NeedsWriteBarrierForMap();
2044 if (needs_write_barrier) {
2045 obj = instr->is_in_object()
2046 ? UseRegister(instr->object())
2047 : UseTempRegister(instr->object());
2049 obj = needs_write_barrier_for_map
2050 ? UseRegister(instr->object())
2051 : UseRegisterAtStart(instr->object());
2054 LOperand* val = needs_write_barrier
2055 ? UseTempRegister(instr->value())
2056 : UseRegister(instr->value());
2060 LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
2061 needs_write_barrier_for_map) ? TempRegister() :
NULL;
2063 return new(zone()) LStoreNamedField(obj, val, temp);
2067 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2068 LOperand*
object = UseFixed(instr->object(),
rdx);
2069 LOperand* value = UseFixed(instr->value(),
rax);
2071 LStoreNamedGeneric* result =
new(zone()) LStoreNamedGeneric(
object, value);
2072 return MarkAsCall(result, instr);
2076 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2077 LOperand* left = UseOrConstantAtStart(instr->left());
2078 LOperand* right = UseOrConstantAtStart(instr->right());
2079 return MarkAsCall(DefineFixed(
new(zone()) LStringAdd(left, right),
rax),
2084 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2085 LOperand*
string = UseTempRegister(instr->string());
2086 LOperand* index = UseTempRegister(instr->index());
2087 LStringCharCodeAt* result =
new(zone()) LStringCharCodeAt(
string, index);
2088 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2092 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2093 LOperand* char_code = UseRegister(instr->value());
2094 LStringCharFromCode* result =
new(zone()) LStringCharFromCode(char_code);
2095 return AssignPointerMap(DefineAsRegister(result));
2099 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2100 LOperand*
string = UseRegisterAtStart(instr->value());
2101 return DefineAsRegister(
new(zone()) LStringLength(
string));
2105 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2106 LAllocateObject* result =
new(zone()) LAllocateObject(TempRegister());
2107 return AssignPointerMap(DefineAsRegister(result));
2111 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2112 return MarkAsCall(DefineFixed(
new(zone()) LFastLiteral,
rax), instr);
2116 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2117 return MarkAsCall(DefineFixed(
new(zone()) LArrayLiteral,
rax), instr);
2121 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2122 return MarkAsCall(DefineFixed(
new(zone()) LObjectLiteral,
rax), instr);
2126 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2127 return MarkAsCall(DefineFixed(
new(zone()) LRegExpLiteral,
rax), instr);
2131 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2132 return MarkAsCall(DefineFixed(
new(zone()) LFunctionLiteral,
rax), instr);
2136 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2137 LOperand*
object = UseAtStart(instr->object());
2138 LOperand* key = UseOrConstantAtStart(instr->key());
2139 LDeleteProperty* result =
new(zone()) LDeleteProperty(
object, key);
2140 return MarkAsCall(DefineFixed(result,
rax), instr);
2144 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2145 allocator_->MarkAsOsrEntry();
2146 current_block_->last_environment()->set_ast_id(instr->ast_id());
2147 return AssignEnvironment(
new(zone()) LOsrEntry);
2151 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2152 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2153 return DefineAsSpilled(
new(zone()) LParameter, spill_index);
2157 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2158 int spill_index = chunk()->GetNextSpillIndex(
false);
2160 Abort(
"Too many spill slots needed for OSR");
2163 return DefineAsSpilled(
new(zone()) LUnknownOSRValue, spill_index);
2167 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2168 argument_count_ -= instr->argument_count();
2169 return MarkAsCall(DefineFixed(
new(zone()) LCallStub,
rax), instr);
2173 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2182 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2183 LOperand* arguments = UseRegister(instr->arguments());
2184 LOperand* length = UseTempRegister(instr->length());
2185 LOperand* index = Use(instr->index());
2186 LAccessArgumentsAt* result =
2187 new(zone()) LAccessArgumentsAt(arguments, length, index);
2188 return AssignEnvironment(DefineAsRegister(result));
2192 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2193 LOperand*
object = UseFixed(instr->value(),
rax);
2194 LToFastProperties* result =
new(zone()) LToFastProperties(
object);
2195 return MarkAsCall(DefineFixed(result,
rax), instr);
2199 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2200 LTypeof* result =
new(zone()) LTypeof(UseAtStart(instr->value()));
2201 return MarkAsCall(DefineFixed(result,
rax), instr);
2205 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2206 return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2210 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2211 HIsConstructCallAndBranch* instr) {
2212 return new(zone()) LIsConstructCallAndBranch(TempRegister());
2216 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2217 HEnvironment* env = current_block_->last_environment();
2220 env->set_ast_id(instr->ast_id());
2222 env->Drop(instr->pop_count());
2223 for (
int i = 0; i < instr->values()->length(); ++i) {
2224 HValue* value = instr->values()->at(i);
2225 if (instr->HasAssignedIndexAt(i)) {
2226 env->Bind(instr->GetAssignedIndexAt(i), value);
2234 if (pending_deoptimization_ast_id_ == instr->ast_id()) {
2235 LLazyBailout* lazy_bailout =
new(zone()) LLazyBailout;
2236 LInstruction* result = AssignEnvironment(lazy_bailout);
2239 instruction_pending_deoptimization_environment_->
2240 SetDeferredLazyDeoptimizationEnvironment(result->environment());
2241 instruction_pending_deoptimization_environment_ =
NULL;
2250 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2251 if (instr->is_function_entry()) {
2252 return MarkAsCall(
new(zone()) LStackCheck, instr);
2254 ASSERT(instr->is_backwards_branch());
2255 return AssignEnvironment(AssignPointerMap(
new(zone()) LStackCheck));
2260 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2261 HEnvironment* outer = current_block_->last_environment();
2262 HConstant* undefined = graph()->GetConstantUndefined();
2263 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2264 instr->arguments_count(),
2268 instr->is_construct());
2269 if (instr->arguments_var() !=
NULL) {
2270 inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
2272 current_block_->UpdateEnvironment(inner);
2273 chunk_->AddInlinedClosure(instr->closure());
2278 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2279 LInstruction* pop =
NULL;
2281 HEnvironment* env = current_block_->last_environment();
2283 if (instr->arguments_pushed()) {
2284 int argument_count = env->arguments_environment()->parameter_count();
2285 pop =
new(zone()) LDrop(argument_count);
2286 argument_count_ -= argument_count;
2289 HEnvironment* outer = current_block_->last_environment()->
2290 DiscardInlined(
false);
2291 current_block_->UpdateEnvironment(outer);
2297 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2298 LOperand* key = UseOrConstantAtStart(instr->key());
2299 LOperand*
object = UseOrConstantAtStart(instr->object());
2300 LIn* result =
new(zone()) LIn(key,
object);
2301 return MarkAsCall(DefineFixed(result,
rax), instr);
2305 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2306 LOperand*
object = UseFixed(instr->enumerable(),
rax);
2307 LForInPrepareMap* result =
new(zone()) LForInPrepareMap(
object);
2308 return MarkAsCall(DefineFixed(result,
rax), instr, CAN_DEOPTIMIZE_EAGERLY);
2312 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2313 LOperand* map = UseRegister(instr->map());
2314 return AssignEnvironment(DefineAsRegister(
2315 new(zone()) LForInCacheArray(map)));
2319 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2320 LOperand* value = UseRegisterAtStart(instr->value());
2321 LOperand* map = UseRegisterAtStart(instr->map());
2322 return AssignEnvironment(
new(zone()) LCheckMapValue(value, map));
2326 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2327 LOperand*
object = UseRegister(instr->object());
2328 LOperand* index = UseTempRegister(instr->index());
2329 return DefineSameAsFirst(
new(zone()) LLoadFieldByIndex(
object, index));
2335 #endif // V8_TARGET_ARCH_X64
HValue * LookupValue(int id) const
#define DEFINE_COMPILE(type)
static LUnallocated * cast(LOperand *op)
static LGap * cast(LInstruction *instr)
static LConstantOperand * Create(int index, Zone *zone)
Handle< Map > transitioned_map()
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
Handle< Object > name() const
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
int GetParameterStackSlot(int index) const
void PrintF(const char *format,...)
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
virtual void PrintDataTo(StringStream *stream)
int ParameterAt(int index)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
virtual void PrintDataTo(StringStream *stream)
LLabel(HBasicBlock *block)
Handle< String > name() const
static const int kNoNumber
static const int kNumAllocatableRegisters
Handle< Object > name() const
LEnvironment * environment() const
#define ASSERT(condition)
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
void PrintTo(StringStream *stream)
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Representation representation() const
EqualityKind kind() const
LGap * GetGapAt(int index) const
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
virtual bool HasResult() const =0
virtual void PrintDataTo(StringStream *stream)
int GetNextSpillIndex(bool is_double)
void PrintTo(StringStream *stream)
LLabel * replacement() const
virtual const char * Mnemonic() const
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
LOperand * GetNextSpillSlot(bool is_double)
void AddMove(LOperand *from, LOperand *to, Zone *zone)
static const char * String(Value tok)
static LDoubleStackSlot * Create(int index, Zone *zone)
LOperand * InputAt(int i)
virtual void PrintDataTo(StringStream *stream)
bool HasEnvironment() const
static void VPrint(const char *format, va_list args)
virtual void PrintDataTo(StringStream *stream)
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
static int ToAllocationIndex(Register reg)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
static LStackSlot * Create(int index, Zone *zone)
virtual void PrintDataTo(StringStream *stream)
static const int kMaxFixedIndex
bool IsGapAt(int index) const
LPointerMap * pointer_map() const
const ZoneList< HBasicBlock * > * blocks() const
static int ToAllocationIndex(XMMRegister reg)
virtual void PrintDataTo(StringStream *stream)
LLabel * GetLabel(int block_id) const
virtual DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,"string-compare-and-branch") Token void PrintDataTo(StringStream *stream)
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
virtual void PrintDataTo(StringStream *stream)
void PrintDataTo(StringStream *stream) const
virtual const char * Mnemonic() const
CompilationInfo * info() const
static const int kNumAllocatableRegisters
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
void AddGapMove(int index, LOperand *from, LOperand *to)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
Handle< String > name() const
LConstantOperand * DefineConstantOperand(HConstant *constant)
virtual void PrintDataTo(StringStream *stream)
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
bool HasPointerMap() const
int NearestGapPos(int index) const
virtual void PrintDataTo(StringStream *stream)
virtual int InputCount()=0
static HValue * cast(HValue *value)
Handle< String > type_literal()
void PrintTo(StringStream *stream)
Handle< Object > LookupLiteral(LConstantOperand *operand) const
virtual void PrintDataTo(StringStream *stream)
Handle< Map > original_map()
const ZoneList< LInstruction * > * instructions() const
virtual void PrintDataTo(StringStream *stream)