30 #if defined(V8_TARGET_ARCH_IA32)
// Expands to the CompileToNative implementation for one concrete Lithium
// instruction type: it simply dispatches to the matching LCodeGen::Do<type>.
#define DEFINE_COMPILE(type)                            \
  void L##type::CompileToNative(LCodeGen* generator) {  \
    generator->Do##type(this);                          \
  }
48 register_spills_[i] =
NULL;
51 double_register_spills_[i] =
NULL;
57 LOperand* spill_operand) {
58 ASSERT(spill_operand->IsStackSlot());
59 ASSERT(register_spills_[allocation_index] ==
NULL);
60 register_spills_[allocation_index] = spill_operand;
65 LOperand* spill_operand) {
66 ASSERT(spill_operand->IsDoubleStackSlot());
67 ASSERT(double_register_spills_[allocation_index] ==
NULL);
68 double_register_spills_[allocation_index] = spill_operand;
73 void LInstruction::VerifyCall() {
81 for (UseIterator it(
this); !it.Done(); it.Advance()) {
83 ASSERT(operand->HasFixedPolicy() ||
84 operand->IsUsedAtStart());
86 for (TempIterator it(
this); !it.Done(); it.Advance()) {
88 ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
95 stream->Add(
"%s ", this->
Mnemonic());
116 if (i > 0) stream->Add(
" ");
131 stream->Add(
" Dead block replaced with B%d", rep->block_id());
137 for (
int i = 0; i < 4; i++) {
138 if (parallel_moves_[i] !=
NULL && !parallel_moves_[i]->
IsRedundant()) {
148 for (
int i = 0; i < 4; i++) {
150 if (parallel_moves_[i] !=
NULL) {
164 case Token::MOD:
return "mod-d";
177 case Token::MOD:
return "mod-t";
179 case Token::BIT_AND:
return "bit-and-t";
180 case Token::BIT_OR:
return "bit-or-t";
181 case Token::BIT_XOR:
return "bit-xor-t";
182 case Token::SHL:
return "sal-t";
183 case Token::SAR:
return "sar-t";
184 case Token::SHR:
return "shr-t";
222 stream->Add(
"if is_object(");
229 stream->Add(
"if is_string(");
236 stream->Add(
"if is_smi(");
243 stream->Add(
"if is_undetectable(");
250 stream->Add(
"if string_compare(");
258 stream->Add(
"if has_instance_type(");
265 stream->Add(
"if has_cached_array_index(");
272 stream->Add(
"if class_of_test(");
274 stream->Add(
", \"%o\") then B%d else B%d",
275 *hydrogen()->class_name(),
282 stream->Add(
"if typeof ");
284 stream->Add(
" == \"%s\" then B%d else B%d",
291 stream->Add(
"#%d / ",
arity());
296 stream->Add(
"/%s ", hydrogen()->OpName());
302 stream->Add(
"/pow_half ");
325 stream->Add(
" #%d / ",
arity());
330 stream->Add(
"[ecx] #%d / ",
arity());
335 SmartArrayPointer<char> name_string =
name()->ToCString();
336 stream->Add(
"%s #%d / ", *name_string,
arity());
341 SmartArrayPointer<char> name_string =
name()->ToCString();
342 stream->Add(
"%s #%d / ", *name_string,
arity());
347 stream->Add(
"#%d / ",
arity());
354 stream->Add(
" #%d / ",
arity());
361 stream->Add(
" length ");
364 stream->Add(
" index ");
373 spill_slot_count_ |= 1;
376 return spill_slot_count_++;
391 HPhase phase(
"L_Mark empty blocks",
this);
392 for (
int i = 0; i <
graph()->
blocks()->length(); ++i) {
394 int first = block->first_instruction_index();
395 int last = block->last_instruction_index();
400 if (last_instr->IsGoto()) {
401 LGoto* goto_instr = LGoto::cast(last_instr);
402 if (label->IsRedundant() &&
403 !label->is_loop_header()) {
404 bool can_eliminate =
true;
405 for (
int i = first + 1; i < last && can_eliminate; ++i) {
409 if (!gap->IsRedundant()) {
410 can_eliminate =
false;
413 can_eliminate =
false;
418 label->set_replacement(
GetLabel(goto_instr->block_id()));
448 stream->Add(
"] <- ");
457 stream->Add(
"] <- ");
466 stream->Add(
"] <- ");
478 LInstructionGap* gap =
new(graph_->
zone()) LInstructionGap(block);
480 if (instr->IsControl()) {
481 instructions_.Add(gap,
zone());
482 index = instructions_.length();
483 instructions_.Add(instr,
zone());
485 index = instructions_.length();
486 instructions_.Add(instr,
zone());
487 instructions_.Add(gap,
zone());
489 if (instr->HasPointerMap()) {
490 pointer_maps_.Add(instr->pointer_map(),
zone());
491 instr->pointer_map()->set_lithium_position(index);
506 int result = index -
info()->scope()->num_parameters() - 1;
514 return (1 +
info()->scope()->num_parameters() - index) *
525 return instructions_[index]->IsGap();
530 while (!
IsGapAt(index)) index--;
547 LConstantOperand* operand)
const {
548 return graph_->
LookupValue(operand->index())->representation();
552 LChunk* LChunkBuilder::Build() {
554 chunk_ =
new(zone()) LChunk(info(), graph());
555 HPhase phase(
"L_Building chunk", chunk_);
559 int alignment_state_index = chunk_->GetNextSpillIndex(
false);
561 USE(alignment_state_index);
563 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
564 for (
int i = 0; i < blocks->length(); i++) {
565 HBasicBlock* next =
NULL;
566 if (i < blocks->length() - 1) next = blocks->at(i + 1);
567 DoBasicBlock(blocks->at(i), next);
568 if (is_aborted())
return NULL;
575 void LChunkBuilder::Abort(
const char* format, ...) {
576 if (FLAG_trace_bailout) {
577 SmartArrayPointer<char>
name(
578 info()->shared_info()->DebugName()->
ToCString());
579 PrintF(
"Aborting LChunk building in @\"%s\": ", *
name);
581 va_start(arguments, format);
590 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
596 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
602 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
603 return Use(value, ToUnallocated(fixed_register));
607 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
608 return Use(value, ToUnallocated(reg));
612 LOperand* LChunkBuilder::UseRegister(HValue* value) {
617 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
624 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
629 LOperand* LChunkBuilder::Use(HValue* value) {
634 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
640 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
641 return value->IsConstant()
647 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
648 return value->IsConstant()
654 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
655 return value->IsConstant()
657 : UseRegister(value);
661 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
662 return value->IsConstant()
664 : UseRegisterAtStart(value);
668 LOperand* LChunkBuilder::UseAny(HValue* value) {
669 return value->IsConstant()
675 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
676 if (value->EmitAtUses()) {
678 VisitInstruction(instr);
680 operand->set_virtual_register(value->id());
685 template<
int I,
int T>
686 LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
687 LUnallocated* result) {
688 result->set_virtual_register(current_instruction_->id());
689 instr->set_result(result);
694 template<
int I,
int T>
695 LInstruction* LChunkBuilder::DefineAsRegister(
696 LTemplateInstruction<1, I, T>* instr) {
702 template<
int I,
int T>
703 LInstruction* LChunkBuilder::DefineAsSpilled(
704 LTemplateInstruction<1, I, T>* instr,
711 template<
int I,
int T>
712 LInstruction* LChunkBuilder::DefineSameAsFirst(
713 LTemplateInstruction<1, I, T>* instr) {
719 template<
int I,
int T>
720 LInstruction* LChunkBuilder::DefineFixed(LTemplateInstruction<1, I, T>* instr,
722 return Define(instr, ToUnallocated(reg));
726 template<
int I,
int T>
727 LInstruction* LChunkBuilder::DefineFixedDouble(
728 LTemplateInstruction<1, I, T>* instr,
730 return Define(instr, ToUnallocated(reg));
734 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
735 HEnvironment* hydrogen_env = current_block_->last_environment();
736 int argument_index_accumulator = 0;
737 instr->set_environment(CreateEnvironment(hydrogen_env,
738 &argument_index_accumulator));
743 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
744 HInstruction* hinstr,
745 CanDeoptimize can_deoptimize) {
750 instr = AssignPointerMap(instr);
752 if (hinstr->HasObservableSideEffects()) {
753 ASSERT(hinstr->next()->IsSimulate());
755 ASSERT(instruction_pending_deoptimization_environment_ ==
NULL);
757 instruction_pending_deoptimization_environment_ = instr;
758 pending_deoptimization_ast_id_ = sim->ast_id();
765 bool needs_environment =
766 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
767 !hinstr->HasObservableSideEffects();
768 if (needs_environment && !instr->HasEnvironment()) {
769 instr = AssignEnvironment(instr);
776 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
777 ASSERT(!instr->HasPointerMap());
778 instr->set_pointer_map(
new(zone()) LPointerMap(position_, zone()));
783 LUnallocated* LChunkBuilder::TempRegister() {
784 LUnallocated* operand =
786 operand->set_virtual_register(allocator_->GetVirtualRegister());
787 if (!allocator_->AllocationOk()) {
788 Abort(
"Not enough virtual registers (temps).");
794 LOperand* LChunkBuilder::FixedTemp(Register reg) {
795 LUnallocated* operand = ToUnallocated(reg);
796 ASSERT(operand->HasFixedPolicy());
801 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
802 LUnallocated* operand = ToUnallocated(reg);
803 ASSERT(operand->HasFixedPolicy());
808 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
809 return new(zone()) LLabel(instr->block());
813 LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
814 return AssignEnvironment(
new(zone()) LDeoptimize);
818 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
819 return AssignEnvironment(
new(zone()) LDeoptimize);
824 HBitwiseBinaryOperation* instr) {
825 if (instr->representation().IsTagged()) {
826 ASSERT(instr->left()->representation().IsTagged());
827 ASSERT(instr->right()->representation().IsTagged());
829 LOperand* context = UseFixed(instr->context(),
esi);
830 LOperand* left = UseFixed(instr->left(),
edx);
831 LOperand* right = UseFixed(instr->right(),
eax);
832 LArithmeticT* result =
new(zone()) LArithmeticT(op, context, left, right);
833 return MarkAsCall(DefineFixed(result,
eax), instr);
836 ASSERT(instr->representation().IsInteger32());
837 ASSERT(instr->left()->representation().IsInteger32());
838 ASSERT(instr->right()->representation().IsInteger32());
839 LOperand* left = UseRegisterAtStart(instr->left());
841 HValue* right_value = instr->right();
842 LOperand* right =
NULL;
843 int constant_value = 0;
844 if (right_value->IsConstant()) {
846 right = chunk_->DefineConstantOperand(constant);
847 constant_value = constant->Integer32Value() & 0x1f;
849 right = UseFixed(right_value,
ecx);
854 bool may_deopt = (op == Token::SHR && constant_value == 0);
855 bool does_deopt =
false;
857 for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
865 LInstruction* result =
866 DefineSameAsFirst(
new(zone()) LShiftI(op, left, right, does_deopt));
867 return does_deopt ? AssignEnvironment(result) : result;
871 LInstruction* LChunkBuilder::DoArithmeticD(
Token::Value op,
872 HArithmeticBinaryOperation* instr) {
873 ASSERT(instr->representation().IsDouble());
874 ASSERT(instr->left()->representation().IsDouble());
875 ASSERT(instr->right()->representation().IsDouble());
877 LOperand* left = UseRegisterAtStart(instr->left());
878 LOperand* right = UseRegisterAtStart(instr->right());
879 LArithmeticD* result =
new(zone()) LArithmeticD(op, left, right);
880 return DefineSameAsFirst(result);
884 LInstruction* LChunkBuilder::DoArithmeticT(
Token::Value op,
885 HArithmeticBinaryOperation* instr) {
891 HValue* left = instr->left();
892 HValue* right = instr->right();
893 ASSERT(left->representation().IsTagged());
894 ASSERT(right->representation().IsTagged());
895 LOperand* context = UseFixed(instr->context(),
esi);
896 LOperand* left_operand = UseFixed(left,
edx);
897 LOperand* right_operand = UseFixed(right,
eax);
898 LArithmeticT* result =
899 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
900 return MarkAsCall(DefineFixed(result,
eax), instr);
904 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
906 current_block_ = block;
907 next_block_ = next_block;
908 if (block->IsStartBlock()) {
909 block->UpdateEnvironment(graph_->start_environment());
911 }
else if (block->predecessors()->length() == 1) {
914 ASSERT(block->phis()->length() == 0);
915 HBasicBlock* pred = block->predecessors()->at(0);
916 HEnvironment* last_environment = pred->last_environment();
919 if (pred->end()->SecondSuccessor() ==
NULL) {
920 ASSERT(pred->end()->FirstSuccessor() == block);
922 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
923 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
924 last_environment = last_environment->Copy();
927 block->UpdateEnvironment(last_environment);
928 ASSERT(pred->argument_count() >= 0);
929 argument_count_ = pred->argument_count();
932 HBasicBlock* pred = block->predecessors()->at(0);
934 HEnvironment* last_environment = pred->last_environment();
935 for (
int i = 0; i < block->phis()->length(); ++i) {
936 HPhi* phi = block->phis()->at(i);
937 last_environment->SetValueAt(phi->merged_index(), phi);
939 for (
int i = 0; i < block->deleted_phis()->length(); ++i) {
940 last_environment->SetValueAt(block->deleted_phis()->at(i),
941 graph_->GetConstantUndefined());
943 block->UpdateEnvironment(last_environment);
945 argument_count_ = pred->argument_count();
947 HInstruction* current = block->first();
948 int start = chunk_->instructions()->length();
949 while (current !=
NULL && !is_aborted()) {
951 if (!current->EmitAtUses()) {
952 VisitInstruction(current);
954 current = current->next();
956 int end = chunk_->instructions()->length() - 1;
958 block->set_first_instruction_index(start);
959 block->set_last_instruction_index(end);
961 block->set_argument_count(argument_count_);
963 current_block_ =
NULL;
967 void LChunkBuilder::VisitInstruction(HInstruction* current) {
968 HInstruction* old_current = current_instruction_;
969 current_instruction_ = current;
970 if (current->has_position()) position_ = current->position();
971 LInstruction* instr = current->CompileToLithium(
this);
974 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
975 instr = AssignPointerMap(instr);
977 if (FLAG_stress_environments && !instr->HasEnvironment()) {
978 instr = AssignEnvironment(instr);
980 instr->set_hydrogen_value(current);
981 chunk_->AddInstruction(instr, current_block_);
983 current_instruction_ = old_current;
987 LEnvironment* LChunkBuilder::CreateEnvironment(
988 HEnvironment* hydrogen_env,
989 int* argument_index_accumulator) {
990 if (hydrogen_env ==
NULL)
return NULL;
992 LEnvironment* outer =
993 CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
994 int ast_id = hydrogen_env->ast_id();
997 int value_count = hydrogen_env->length();
998 LEnvironment* result =
999 new(zone()) LEnvironment(hydrogen_env->closure(),
1000 hydrogen_env->frame_type(),
1002 hydrogen_env->parameter_count(),
1007 int argument_index = *argument_index_accumulator;
1008 for (
int i = 0; i < value_count; ++i) {
1009 if (hydrogen_env->is_special_index(i))
continue;
1011 HValue* value = hydrogen_env->values()->at(i);
1012 LOperand* op =
NULL;
1013 if (value->IsArgumentsObject()) {
1015 }
else if (value->IsPushArgument()) {
1016 op =
new(zone()) LArgument(argument_index++);
1020 result->AddValue(op, value->representation());
1024 *argument_index_accumulator = argument_index;
1031 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1032 return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1036 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1037 HValue* value = instr->value();
1038 if (value->EmitAtUses()) {
1039 ASSERT(value->IsConstant());
1040 ASSERT(!value->representation().IsDouble());
1042 ? instr->FirstSuccessor()
1043 : instr->SecondSuccessor();
1044 return new(zone()) LGoto(successor->block_id());
1050 HType
type = value->type();
1051 if (!rep.IsTagged() || type.IsSmi() || type.IsBoolean()) {
1052 return new(zone()) LBranch(UseRegister(value),
NULL);
1055 ToBooleanStub::Types expected = instr->expected_input_types();
1059 bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
1060 LOperand* temp = needs_temp ? TempRegister() :
NULL;
1061 return AssignEnvironment(
new(zone()) LBranch(UseRegister(value), temp));
1065 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1066 ASSERT(instr->value()->representation().IsTagged());
1067 LOperand* value = UseRegisterAtStart(instr->value());
1068 return new(zone()) LCmpMapAndBranch(value);
1072 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1073 return DefineAsRegister(
new(zone()) LArgumentsLength(Use(length->value())));
1077 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1078 return DefineAsRegister(
new(zone()) LArgumentsElements);
1082 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1085 LOperand* context = UseFixed(instr->context(),
esi);
1086 LInstanceOf* result =
new(zone()) LInstanceOf(context, left, right);
1087 return MarkAsCall(DefineFixed(result,
eax), instr);
1091 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1092 HInstanceOfKnownGlobal* instr) {
1093 LInstanceOfKnownGlobal* result =
1094 new(zone()) LInstanceOfKnownGlobal(
1095 UseFixed(instr->context(),
esi),
1098 return MarkAsCall(DefineFixed(result,
eax), instr);
1102 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1103 LOperand* receiver = UseRegister(instr->receiver());
1104 LOperand*
function = UseRegisterAtStart(instr->function());
1105 LOperand* temp = TempRegister();
1106 LWrapReceiver* result =
1107 new(zone()) LWrapReceiver(receiver,
function, temp);
1108 return AssignEnvironment(DefineSameAsFirst(result));
1112 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1113 LOperand*
function = UseFixed(instr->function(),
edi);
1114 LOperand* receiver = UseFixed(instr->receiver(),
eax);
1115 LOperand* length = UseFixed(instr->length(),
ebx);
1116 LOperand* elements = UseFixed(instr->elements(),
ecx);
1117 LApplyArguments* result =
new(zone()) LApplyArguments(
function,
1121 return MarkAsCall(DefineFixed(result,
eax), instr, CAN_DEOPTIMIZE_EAGERLY);
1125 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1127 LOperand* argument = UseAny(instr->argument());
1128 return new(zone()) LPushArgument(argument);
1132 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1133 return instr->HasNoUses()
1135 : DefineAsRegister(
new(zone()) LThisFunction);
1139 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1140 return instr->HasNoUses() ?
NULL : DefineAsRegister(
new(zone()) LContext);
1144 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1145 LOperand* context = UseRegisterAtStart(instr->value());
1146 return DefineAsRegister(
new(zone()) LOuterContext(context));
1150 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1151 LOperand* context = UseFixed(instr->context(),
esi);
1152 return MarkAsCall(
new(zone()) LDeclareGlobals(context), instr);
1156 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1157 LOperand* context = UseRegisterAtStart(instr->value());
1158 return DefineAsRegister(
new(zone()) LGlobalObject(context));
1162 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1163 LOperand* global_object = UseRegisterAtStart(instr->value());
1164 return DefineAsRegister(
new(zone()) LGlobalReceiver(global_object));
1168 LInstruction* LChunkBuilder::DoCallConstantFunction(
1169 HCallConstantFunction* instr) {
1170 argument_count_ -= instr->argument_count();
1171 return MarkAsCall(DefineFixed(
new(zone()) LCallConstantFunction,
eax), instr);
1175 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1176 LOperand* context = UseFixed(instr->context(),
esi);
1177 LOperand*
function = UseFixed(instr->function(),
edi);
1178 argument_count_ -= instr->argument_count();
1179 LInvokeFunction* result =
new(zone()) LInvokeFunction(context,
function);
1180 return MarkAsCall(DefineFixed(result,
eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1184 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1186 if (op == kMathLog) {
1187 ASSERT(instr->representation().IsDouble());
1188 ASSERT(instr->value()->representation().IsDouble());
1189 LOperand* context = UseAny(instr->context());
1190 LOperand* input = UseRegisterAtStart(instr->value());
1191 LUnaryMathOperation* result =
new(zone()) LUnaryMathOperation(context,
1193 return DefineSameAsFirst(result);
1194 }
else if (op == kMathSin || op == kMathCos || op == kMathTan) {
1195 LOperand* context = UseFixed(instr->context(),
esi);
1196 LOperand* input = UseFixedDouble(instr->value(),
xmm1);
1197 LUnaryMathOperation* result =
new(zone()) LUnaryMathOperation(context,
1199 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1201 LOperand* input = UseRegisterAtStart(instr->value());
1202 LOperand* context = UseAny(instr->context());
1204 LOperand* temp = TempRegister();
1205 LMathPowHalf* result =
new(zone()) LMathPowHalf(context, input, temp);
1206 return DefineSameAsFirst(result);
1208 LUnaryMathOperation* result =
new(zone()) LUnaryMathOperation(context,
1212 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1214 return AssignEnvironment(DefineAsRegister(result));
1216 return AssignEnvironment(DefineAsRegister(result));
1218 return DefineSameAsFirst(result);
1227 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1228 ASSERT(instr->key()->representation().IsTagged());
1229 LOperand* context = UseFixed(instr->context(),
esi);
1230 LOperand* key = UseFixed(instr->key(),
ecx);
1231 argument_count_ -= instr->argument_count();
1232 LCallKeyed* result =
new(zone()) LCallKeyed(context, key);
1233 return MarkAsCall(DefineFixed(result,
eax), instr);
1237 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1238 LOperand* context = UseFixed(instr->context(),
esi);
1239 argument_count_ -= instr->argument_count();
1240 LCallNamed* result =
new(zone()) LCallNamed(context);
1241 return MarkAsCall(DefineFixed(result,
eax), instr);
1245 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1246 LOperand* context = UseFixed(instr->context(),
esi);
1247 argument_count_ -= instr->argument_count();
1248 LCallGlobal* result =
new(zone()) LCallGlobal(context);
1249 return MarkAsCall(DefineFixed(result,
eax), instr);
1253 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1254 argument_count_ -= instr->argument_count();
1255 return MarkAsCall(DefineFixed(
new(zone()) LCallKnownGlobal,
eax), instr);
1259 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1260 LOperand* context = UseFixed(instr->context(),
esi);
1261 LOperand* constructor = UseFixed(instr->constructor(),
edi);
1262 argument_count_ -= instr->argument_count();
1263 LCallNew* result =
new(zone()) LCallNew(context, constructor);
1264 return MarkAsCall(DefineFixed(result,
eax), instr);
1268 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1269 LOperand* context = UseFixed(instr->context(),
esi);
1270 LOperand*
function = UseFixed(instr->function(),
edi);
1271 argument_count_ -= instr->argument_count();
1272 LCallFunction* result =
new(zone()) LCallFunction(context,
function);
1273 return MarkAsCall(DefineFixed(result,
eax), instr);
1277 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1278 argument_count_ -= instr->argument_count();
1279 LOperand* context = UseFixed(instr->context(),
esi);
1280 return MarkAsCall(DefineFixed(
new(zone()) LCallRuntime(context),
eax), instr);
1284 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1285 return DoShift(Token::SHR, instr);
1289 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1290 return DoShift(Token::SAR, instr);
1294 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1295 return DoShift(Token::SHL, instr);
1299 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1300 if (instr->representation().IsInteger32()) {
1301 ASSERT(instr->left()->representation().IsInteger32());
1302 ASSERT(instr->right()->representation().IsInteger32());
1304 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1305 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1306 return DefineSameAsFirst(
new(zone()) LBitI(left, right));
1308 ASSERT(instr->representation().IsTagged());
1309 ASSERT(instr->left()->representation().IsTagged());
1310 ASSERT(instr->right()->representation().IsTagged());
1312 LOperand* context = UseFixed(instr->context(),
esi);
1313 LOperand* left = UseFixed(instr->left(),
edx);
1314 LOperand* right = UseFixed(instr->right(),
eax);
1315 LArithmeticT* result =
1316 new(zone()) LArithmeticT(instr->op(), context, left, right);
1317 return MarkAsCall(DefineFixed(result,
eax), instr);
1322 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1323 ASSERT(instr->value()->representation().IsInteger32());
1324 ASSERT(instr->representation().IsInteger32());
1325 if (instr->HasNoUses())
return NULL;
1326 LOperand* input = UseRegisterAtStart(instr->value());
1327 LBitNotI* result =
new(zone()) LBitNotI(input);
1328 return DefineSameAsFirst(result);
1332 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1333 if (instr->representation().IsDouble()) {
1335 }
else if (instr->representation().IsInteger32()) {
1338 LOperand* temp = FixedTemp(
edx);
1339 LOperand* dividend = UseFixed(instr->left(),
eax);
1340 LOperand* divisor = UseRegister(instr->right());
1341 LDivI* result =
new(zone()) LDivI(dividend, divisor, temp);
1342 return AssignEnvironment(DefineFixed(result,
eax));
1344 ASSERT(instr->representation().IsTagged());
1350 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1356 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1357 if (instr->representation().IsInteger32()) {
1358 ASSERT(instr->left()->representation().IsInteger32());
1359 ASSERT(instr->right()->representation().IsInteger32());
1361 LInstruction* result;
1362 if (instr->HasPowerOf2Divisor()) {
1364 LOperand* value = UseRegisterAtStart(instr->left());
1366 new(zone()) LModI(value, UseOrConstant(instr->right()),
NULL);
1367 result = DefineSameAsFirst(mod);
1371 LOperand* temp = FixedTemp(
edx);
1372 LOperand* value = UseFixed(instr->left(),
eax);
1373 LOperand* divisor = UseRegister(instr->right());
1374 LModI* mod =
new(zone()) LModI(value, divisor, temp);
1375 result = DefineFixed(mod,
edx);
1380 ? AssignEnvironment(result)
1382 }
else if (instr->representation().IsTagged()) {
1383 return DoArithmeticT(Token::MOD, instr);
1385 ASSERT(instr->representation().IsDouble());
1389 LOperand* left = UseFixedDouble(instr->left(),
xmm2);
1390 LOperand* right = UseFixedDouble(instr->right(),
xmm1);
1391 LArithmeticD* result =
new(zone()) LArithmeticD(Token::MOD, left, right);
1392 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1397 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1398 if (instr->representation().IsInteger32()) {
1399 ASSERT(instr->left()->representation().IsInteger32());
1400 ASSERT(instr->right()->representation().IsInteger32());
1401 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1402 LOperand* right = UseOrConstant(instr->MostConstantOperand());
1403 LOperand* temp =
NULL;
1405 temp = TempRegister();
1407 LMulI* mul =
new(zone()) LMulI(left, right, temp);
1410 AssignEnvironment(mul);
1412 return DefineSameAsFirst(mul);
1413 }
else if (instr->representation().IsDouble()) {
1416 ASSERT(instr->representation().IsTagged());
1422 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1423 if (instr->representation().IsInteger32()) {
1424 ASSERT(instr->left()->representation().IsInteger32());
1425 ASSERT(instr->right()->representation().IsInteger32());
1426 LOperand* left = UseRegisterAtStart(instr->left());
1427 LOperand* right = UseOrConstantAtStart(instr->right());
1428 LSubI* sub =
new(zone()) LSubI(left, right);
1429 LInstruction* result = DefineSameAsFirst(sub);
1431 result = AssignEnvironment(result);
1434 }
else if (instr->representation().IsDouble()) {
1437 ASSERT(instr->representation().IsTagged());
1443 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1444 if (instr->representation().IsInteger32()) {
1445 ASSERT(instr->left()->representation().IsInteger32());
1446 ASSERT(instr->right()->representation().IsInteger32());
1447 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1448 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1449 LAddI* add =
new(zone()) LAddI(left, right);
1450 LInstruction* result = DefineSameAsFirst(add);
1452 result = AssignEnvironment(result);
1455 }
else if (instr->representation().IsDouble()) {
1458 ASSERT(instr->representation().IsTagged());
1464 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1465 ASSERT(instr->representation().IsDouble());
1468 Representation exponent_type = instr->right()->representation();
1469 ASSERT(instr->left()->representation().IsDouble());
1470 LOperand* left = UseFixedDouble(instr->left(),
xmm2);
1471 LOperand* right = exponent_type.IsDouble() ?
1472 UseFixedDouble(instr->right(),
xmm1) :
1473 UseFixed(instr->right(),
eax);
1474 LPower* result =
new(zone()) LPower(left, right);
1475 return MarkAsCall(DefineFixedDouble(result,
xmm3), instr,
1476 CAN_DEOPTIMIZE_EAGERLY);
1480 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
1481 ASSERT(instr->representation().IsDouble());
1482 ASSERT(instr->global_object()->representation().IsTagged());
1483 LOperand* global_object = UseFixed(instr->global_object(),
eax);
1484 LRandom* result =
new(zone()) LRandom(global_object);
1485 return MarkAsCall(DefineFixedDouble(result,
xmm1), instr);
1489 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1490 ASSERT(instr->left()->representation().IsTagged());
1491 ASSERT(instr->right()->representation().IsTagged());
1492 LOperand* context = UseFixed(instr->context(),
esi);
1493 LOperand* left = UseFixed(instr->left(),
edx);
1494 LOperand* right = UseFixed(instr->right(),
eax);
1495 LCmpT* result =
new(zone()) LCmpT(context, left, right);
1496 return MarkAsCall(DefineFixed(result,
eax), instr);
1500 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1501 HCompareIDAndBranch* instr) {
1502 Representation r = instr->GetInputRepresentation();
1503 if (r.IsInteger32()) {
1504 ASSERT(instr->left()->representation().IsInteger32());
1505 ASSERT(instr->right()->representation().IsInteger32());
1506 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1507 LOperand* right = UseOrConstantAtStart(instr->right());
1508 return new(zone()) LCmpIDAndBranch(left, right);
1511 ASSERT(instr->left()->representation().IsDouble());
1512 ASSERT(instr->right()->representation().IsDouble());
1515 if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1516 left = UseRegisterOrConstantAtStart(instr->left());
1517 right = UseRegisterOrConstantAtStart(instr->right());
1519 left = UseRegisterAtStart(instr->left());
1520 right = UseRegisterAtStart(instr->right());
1522 return new(zone()) LCmpIDAndBranch(left, right);
1527 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1528 HCompareObjectEqAndBranch* instr) {
1529 LOperand* left = UseRegisterAtStart(instr->left());
1530 LOperand* right = UseAtStart(instr->right());
1531 return new(zone()) LCmpObjectEqAndBranch(left, right);
1535 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1536 HCompareConstantEqAndBranch* instr) {
1537 return new(zone()) LCmpConstantEqAndBranch(
1538 UseRegisterAtStart(instr->value()));
1542 LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
1545 return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
1549 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1550 ASSERT(instr->value()->representation().IsTagged());
1551 LOperand* temp = TempRegister();
1552 return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
1556 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1557 ASSERT(instr->value()->representation().IsTagged());
1558 LOperand* temp = TempRegister();
1559 return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
1563 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1564 ASSERT(instr->value()->representation().IsTagged());
1565 return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1569 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1570 HIsUndetectableAndBranch* instr) {
1571 ASSERT(instr ->value()->representation().IsTagged());
1572 return new(zone()) LIsUndetectableAndBranch(
1573 UseRegisterAtStart(instr->value()), TempRegister());
1577 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1578 HStringCompareAndBranch* instr) {
1579 ASSERT(instr->left()->representation().IsTagged());
1580 ASSERT(instr->right()->representation().IsTagged());
1581 LOperand* context = UseFixed(instr->context(),
esi);
1582 LOperand* left = UseFixed(instr->left(),
edx);
1583 LOperand* right = UseFixed(instr->right(),
eax);
1585 LStringCompareAndBranch* result =
new(zone())
1586 LStringCompareAndBranch(context, left, right);
1588 return MarkAsCall(result, instr);
// Branch on the instance type of the value's map.
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  // NOTE(review): the constructor call is truncated in this extraction --
  // the trailing comma implies at least one more argument (presumably a
  // TempRegister()) on a line that is not visible here.  Confirm upstream.
  return new(zone()) LHasInstanceTypeAndBranch(
      UseRegisterAtStart(instr->value()),
1601 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1602 HGetCachedArrayIndex* instr) {
1603 ASSERT(instr->value()->representation().IsTagged());
1604 LOperand* value = UseRegisterAtStart(instr->value());
1606 return DefineAsRegister(
new(zone()) LGetCachedArrayIndex(value));
1610 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1611 HHasCachedArrayIndexAndBranch* instr) {
1612 ASSERT(instr->value()->representation().IsTagged());
1613 return new(zone()) LHasCachedArrayIndexAndBranch(
1614 UseRegisterAtStart(instr->value()));
// Branch on the class name of the value (see the "if class_of_test" print
// format for this instruction).
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  // NOTE(review): the argument list is truncated in this extraction -- the
  // trailing comma implies further temp-register arguments on lines not
  // visible here.  Confirm against the full source.
  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
1627 LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
1628 LOperand* array = UseRegisterAtStart(instr->value());
1629 return DefineAsRegister(
new(zone()) LJSArrayLength(array));
1633 LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
1634 HFixedArrayBaseLength* instr) {
1635 LOperand* array = UseRegisterAtStart(instr->value());
1636 return DefineAsRegister(
new(zone()) LFixedArrayBaseLength(array));
1640 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1641 LOperand*
object = UseRegisterAtStart(instr->value());
1642 return DefineAsRegister(
new(zone()) LElementsKind(
object));
1646 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1647 LOperand*
object = UseRegister(instr->value());
1648 LValueOf* result =
new(zone()) LValueOf(
object, TempRegister());
1649 return DefineSameAsFirst(result);
1653 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1654 LOperand* date = UseFixed(instr->value(),
eax);
1655 LDateField* result =
1656 new(zone()) LDateField(date, FixedTemp(
ecx), instr->index());
1657 return MarkAsCall(DefineFixed(result,
eax), instr);
1661 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1662 return AssignEnvironment(
new(zone()) LBoundsCheck(
1663 UseRegisterOrConstantAtStart(instr->index()),
1664 UseAtStart(instr->length())));
// Abrupt control-flow exit; no output operand is defined for it.
// NOTE(review): the function body is not visible in this extraction --
// presumably it simply returns NULL.  Confirm against the full source.
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1675 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1676 LOperand* context = UseFixed(instr->context(),
esi);
1677 LOperand* value = UseFixed(instr->value(),
eax);
1678 return MarkAsCall(
new(zone()) LThrow(context, value), instr);
// NOTE(review): the function body is not visible in this extraction.
// HUseConst keeps a constant alive; presumably no LIR is emitted (NULL
// return).  Confirm against the full source.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
// NOTE(review): the function body is not visible in this extraction.  The
// parameter name 'bad' suggests this instruction should never reach chunk
// building; confirm the body (likely UNREACHABLE/NULL) against the full
// source.
LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
// Lowers representation changes (tagged <-> double <-> int32).
// NOTE(review): several source lines are missing from this extraction
// (ternary arms, else branches, closing braces); the gap comments below
// mark them.  Confirm against the full source before editing this function.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      LOperand* value = UseRegister(instr->value());
      // Gap: the ternary arms selecting the temp operand are missing here.
      LOperand* temp = instr->deoptimize_on_minus_zero()
      LNumberUntagD* res =
          new(zone()) LNumberUntagD(value, temp);
      return AssignEnvironment(DefineAsRegister(res));
      // Gap: the "} else {" for the tagged->int32 arm is missing here.
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      if (instr->value()->type().IsSmi()) {
        return DefineSameAsFirst(
            new(zone()) LSmiUntag(value,
            false));
      // Gap: closing brace of the smi fast path is missing here.
      bool truncating = instr->CanTruncateToInt32();
      // Gap: the initializer of xmm_temp is missing here.
      LOperand* xmm_temp =
      LTaggedToI* res =
          new(zone()) LTaggedToI(value, xmm_temp);
      return AssignEnvironment(DefineSameAsFirst(res));
  }
  else if (from.IsDouble()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(instr->value());
      LOperand* temp = TempRegister();
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result =
          new(zone()) LNumberTagD(value, temp);
      return AssignPointerMap(Define(result, result_temp));
      // Gap: the "} else {" for the double->int32 arm is missing here.
      ASSERT(to.IsInteger32());
      bool truncating = instr->CanTruncateToInt32();
      // Gap: the declaration of needs_temp is missing here.
      LOperand* value = needs_temp ?
          UseTempRegister(instr->value()) : UseRegister(instr->value());
      LOperand* temp = needs_temp ? TempRegister() :
          NULL;
      return AssignEnvironment(
          DefineAsRegister(
              new(zone()) LDoubleToI(value, temp)));
  }
  else if (from.IsInteger32()) {
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      if (val->HasRange() && val->range()->IsInSmiRange()) {
        return DefineSameAsFirst(
            new(zone()) LSmiTag(value));
      // Gap: closing brace of the smi-range fast path is missing here.
      LNumberTagI* result =
          new(zone()) LNumberTagI(value);
      return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
      // Gap: the else arm (int32->double) header is missing here.
      return DefineAsRegister(
          new(zone()) LInteger32ToDouble(Use(instr->value())));
1762 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1763 LOperand* value = UseAtStart(instr->value());
1764 return AssignEnvironment(
new(zone()) LCheckNonSmi(value));
1768 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1769 LOperand* value = UseRegisterAtStart(instr->value());
1770 LOperand* temp = TempRegister();
1771 LCheckInstanceType* result =
new(zone()) LCheckInstanceType(value, temp);
1772 return AssignEnvironment(result);
1776 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1777 LOperand* temp = TempRegister();
1778 LCheckPrototypeMaps* result =
new(zone()) LCheckPrototypeMaps(temp);
1779 return AssignEnvironment(result);
1783 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1784 LOperand* value = UseAtStart(instr->value());
1785 return AssignEnvironment(
new(zone()) LCheckSmi(value));
1789 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1794 LOperand* value = Isolate::Current()->heap()->InNewSpace(*instr->target())
1795 ? UseRegisterAtStart(instr->value())
1796 : UseAtStart(instr->value());
1797 return AssignEnvironment(
new(zone()) LCheckFunction(value));
1801 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1802 LOperand* value = UseRegisterAtStart(instr->value());
1803 LCheckMaps* result =
new(zone()) LCheckMaps(value);
1804 return AssignEnvironment(result);
1808 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1809 HValue* value = instr->value();
1810 Representation input_rep = value->representation();
1811 if (input_rep.IsDouble()) {
1812 LOperand* reg = UseRegister(value);
1813 return DefineAsRegister(
new(zone()) LClampDToUint8(reg));
1814 }
else if (input_rep.IsInteger32()) {
1815 LOperand* reg = UseFixed(value,
eax);
1816 return DefineFixed(
new(zone()) LClampIToUint8(reg),
eax);
1818 ASSERT(input_rep.IsTagged());
1819 LOperand* reg = UseFixed(value,
eax);
1822 LOperand* temp = FixedTemp(
xmm1);
1823 LClampTToUint8* result =
new(zone()) LClampTToUint8(reg, temp);
1824 return AssignEnvironment(DefineFixed(result,
eax));
1829 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1830 return new(zone()) LReturn(UseFixed(instr->value(),
eax));
// Materializes a constant in the requested representation.
// NOTE(review): the ternary arms initializing 'temp' and the trailing
// else/fallback arm are missing from this extraction; confirm against the
// full source before editing.
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsInteger32()) {
    return DefineAsRegister(
        new(zone()) LConstantI);
  }
  else if (r.IsDouble()) {
    double value = instr->DoubleValue();
    // Gap: the ternary arms choosing the temp operand are missing here.
    LOperand* temp = (BitCast<uint64_t, double>(value) != 0)
    return DefineAsRegister(
        new(zone()) LConstantD(temp));
  }
  else if (r.IsTagged()) {
    return DefineAsRegister(
        new(zone()) LConstantT);
1853 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1854 LLoadGlobalCell* result =
new(zone()) LLoadGlobalCell;
1855 return instr->RequiresHoleCheck()
1856 ? AssignEnvironment(DefineAsRegister(result))
1857 : DefineAsRegister(result);
1861 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1862 LOperand* context = UseFixed(instr->context(),
esi);
1863 LOperand* global_object = UseFixed(instr->global_object(),
edx);
1864 LLoadGlobalGeneric* result =
1865 new(zone()) LLoadGlobalGeneric(context, global_object);
1866 return MarkAsCall(DefineFixed(result,
eax), instr);
1870 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1871 LStoreGlobalCell* result =
1872 new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
1873 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1877 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1878 LOperand* context = UseFixed(instr->context(),
esi);
1879 LOperand* global_object = UseFixed(instr->global_object(),
edx);
1880 LOperand* value = UseFixed(instr->value(),
eax);
1881 LStoreGlobalGeneric* result =
1882 new(zone()) LStoreGlobalGeneric(context, global_object, value);
1883 return MarkAsCall(result, instr);
1887 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1888 LOperand* context = UseRegisterAtStart(instr->value());
1889 LInstruction* result =
1890 DefineAsRegister(
new(zone()) LLoadContextSlot(context));
1891 return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
// Stores a value into a context slot.
// NOTE(review): the declarations of 'value' and 'temp' and the "} else {"
// / "temp = NULL;" lines are missing from this extraction; confirm against
// the full source before editing.
LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* context = UseRegister(instr->context());
  if (instr->NeedsWriteBarrier()) {
    value = UseTempRegister(instr->value());
    temp = TempRegister();
    // Gap: "} else {" is missing here; the next line is the non-barrier arm.
    value = UseRegister(instr->value());
  LInstruction* result =
      new(zone()) LStoreContextSlot(context, value, temp);
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1911 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1912 ASSERT(instr->representation().IsTagged());
1913 LOperand* obj = UseRegisterAtStart(instr->object());
1914 return DefineAsRegister(
new(zone()) LLoadNamedField(obj));
1918 LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
1919 HLoadNamedFieldPolymorphic* instr) {
1920 ASSERT(instr->representation().IsTagged());
1921 if (instr->need_generic()) {
1922 LOperand* context = UseFixed(instr->context(),
esi);
1923 LOperand* obj = UseFixed(instr->object(),
edx);
1924 LLoadNamedFieldPolymorphic* result =
1925 new(zone()) LLoadNamedFieldPolymorphic(context, obj);
1926 return MarkAsCall(DefineFixed(result,
eax), instr);
1928 LOperand* context = UseAny(instr->context());
1929 LOperand* obj = UseRegisterAtStart(instr->object());
1930 LLoadNamedFieldPolymorphic* result =
1931 new(zone()) LLoadNamedFieldPolymorphic(context, obj);
1932 return AssignEnvironment(DefineAsRegister(result));
1937 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1938 LOperand* context = UseFixed(instr->context(),
esi);
1939 LOperand*
object = UseFixed(instr->object(),
edx);
1940 LLoadNamedGeneric* result =
new(zone()) LLoadNamedGeneric(context,
object);
1941 return MarkAsCall(DefineFixed(result,
eax), instr);
// Loads a function's prototype; may deoptimize, hence the environment.
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
    HLoadFunctionPrototype* instr) {
  // NOTE(review): the argument list is truncated in this extraction -- the
  // trailing comma implies a further argument (presumably a TempRegister())
  // on a line not visible here.  Confirm against the full source.
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()),
1953 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1954 LOperand* input = UseRegisterAtStart(instr->value());
1955 return DefineAsRegister(
new(zone()) LLoadElements(input));
1959 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1960 HLoadExternalArrayPointer* instr) {
1961 LOperand* input = UseRegisterAtStart(instr->value());
1962 return DefineAsRegister(
new(zone()) LLoadExternalArrayPointer(input));
1966 LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
1967 HLoadKeyedFastElement* instr) {
1968 ASSERT(instr->representation().IsTagged());
1969 ASSERT(instr->key()->representation().IsInteger32());
1970 LOperand* obj = UseRegisterAtStart(instr->object());
1971 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1972 LLoadKeyedFastElement* result =
new(zone()) LLoadKeyedFastElement(obj, key);
1973 if (instr->RequiresHoleCheck()) AssignEnvironment(result);
1974 return DefineAsRegister(result);
1978 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1979 HLoadKeyedFastDoubleElement* instr) {
1980 ASSERT(instr->representation().IsDouble());
1981 ASSERT(instr->key()->representation().IsInteger32());
1982 LOperand* elements = UseRegisterAtStart(instr->elements());
1983 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1984 LLoadKeyedFastDoubleElement* result =
1985 new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1986 return AssignEnvironment(DefineAsRegister(result));
// Keyed load from a typed (external) array.
// NOTE(review): the ASSERT wrapping the two representation conjuncts and
// the ternary around the final return are truncated in this extraction;
// confirm against the full source before editing.
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
    HLoadKeyedSpecializedArrayElement* instr) {
  // Gap: the enclosing ASSERT(...) head is missing around these conjuncts.
  (instr->representation().IsInteger32() &&
  (instr->representation().IsDouble() &&
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LLoadKeyedSpecializedArrayElement* result =
      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
  LInstruction* load_instr = DefineAsRegister(result);
  // Gap: the condition and alternative of this ternary are missing here.
  ? AssignEnvironment(load_instr)
2014 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2015 LOperand* context = UseFixed(instr->context(),
esi);
2016 LOperand*
object = UseFixed(instr->object(),
edx);
2017 LOperand* key = UseFixed(instr->key(),
ecx);
2019 LLoadKeyedGeneric* result =
2020 new(zone()) LLoadKeyedGeneric(context,
object, key);
2021 return MarkAsCall(DefineFixed(result,
eax), instr);
2025 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
2026 HStoreKeyedFastElement* instr) {
2027 bool needs_write_barrier = instr->NeedsWriteBarrier();
2028 ASSERT(instr->value()->representation().IsTagged());
2029 ASSERT(instr->object()->representation().IsTagged());
2030 ASSERT(instr->key()->representation().IsInteger32());
2032 LOperand* obj = UseRegister(instr->object());
2033 LOperand* val = needs_write_barrier
2034 ? UseTempRegister(instr->value())
2035 : UseRegisterAtStart(instr->value());
2036 LOperand* key = needs_write_barrier
2037 ? UseTempRegister(instr->key())
2038 : UseRegisterOrConstantAtStart(instr->key());
2039 return new(zone()) LStoreKeyedFastElement(obj, key, val);
2043 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
2044 HStoreKeyedFastDoubleElement* instr) {
2045 ASSERT(instr->value()->representation().IsDouble());
2046 ASSERT(instr->elements()->representation().IsTagged());
2047 ASSERT(instr->key()->representation().IsInteger32());
2049 LOperand* elements = UseRegisterAtStart(instr->elements());
2050 LOperand* val = UseTempRegister(instr->value());
2051 LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2053 return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
// Keyed store into a typed (external) array.
// NOTE(review): the ASSERT head around the representation conjuncts, the
// if/else conditions selecting how 'val' is used, and the tail of the
// constructor call are missing from this extraction; confirm against the
// full source before editing.
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
    HStoreKeyedSpecializedArrayElement* instr) {
  // Gap: the enclosing ASSERT(...) head is missing around these conjuncts.
  (instr->value()->representation().IsInteger32() &&
  (instr->value()->representation().IsDouble() &&
  ASSERT(instr->external_pointer()->representation().IsExternal());
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LOperand* val =
      NULL;
  // Gap: the conditions choosing between these two assignments are missing.
  val = UseFixed(instr->value(),
      eax);
  val = UseRegister(instr->value());
  // Gap: the remaining constructor arguments (key, val) are missing here.
  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
2088 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2089 LOperand* context = UseFixed(instr->context(),
esi);
2090 LOperand*
object = UseFixed(instr->object(),
edx);
2091 LOperand* key = UseFixed(instr->key(),
ecx);
2092 LOperand* value = UseFixed(instr->value(),
eax);
2094 ASSERT(instr->object()->representation().IsTagged());
2095 ASSERT(instr->key()->representation().IsTagged());
2096 ASSERT(instr->value()->representation().IsTagged());
2098 LStoreKeyedGeneric* result =
2099 new(zone()) LStoreKeyedGeneric(context,
object, key, value);
2100 return MarkAsCall(result, instr);
// Lowers an elements-kind transition of an object's map.
// NOTE(review): the condition selecting between the in-place and
// runtime-call paths, the "} else {", and the tail of the second
// constructor call are missing from this extraction; confirm against the
// full source before editing.
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  ElementsKind from_kind = instr->original_map()->elements_kind();
  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
  // Gap: the if(...) testing from_kind/to_kind is missing here.
  LOperand* object = UseRegister(instr->object());
  LOperand* new_map_reg = TempRegister();
  LOperand* temp_reg = TempRegister();
  LTransitionElementsKind* result =
      new(zone()) LTransitionElementsKind(object, new_map_reg, temp_reg);
  return DefineSameAsFirst(result);
  // Gap: "} else {" is missing here; the following is the call path.
  LOperand* object = UseFixed(instr->object(),
      eax);
  LOperand* fixed_object_reg = FixedTemp(
      edx);
  LOperand* new_map_reg = FixedTemp(
      ebx);
  LTransitionElementsKind* result =
      new(zone()) LTransitionElementsKind(object,
  // Gap: the remaining constructor arguments are missing here.
  return MarkAsCall(DefineFixed(result,
      eax), instr);
// Lowers a named-field store, with extra temps when write barriers (for the
// value or for a map transition) are needed.
// NOTE(review): the declaration of 'obj' and the "} else {" between the two
// assignments to it are missing from this extraction; confirm against the
// full source before editing.
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
      instr->NeedsWriteBarrierForMap();
  // Gap: the declaration "LOperand* obj;" is missing here.
  if (needs_write_barrier) {
    obj = instr->is_in_object()
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
    // Gap: "} else {" is missing here.
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());
  LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
      needs_write_barrier_for_map) ? TempRegister() :
      NULL;
  LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() :
      NULL;
  return new(zone()) LStoreNamedField(obj, val, temp, temp_map);
2160 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2161 LOperand* context = UseFixed(instr->context(),
esi);
2162 LOperand*
object = UseFixed(instr->object(),
edx);
2163 LOperand* value = UseFixed(instr->value(),
eax);
2165 LStoreNamedGeneric* result =
2166 new(zone()) LStoreNamedGeneric(context,
object, value);
2167 return MarkAsCall(result, instr);
2171 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2172 LOperand* context = UseFixed(instr->context(),
esi);
2173 LOperand* left = UseOrConstantAtStart(instr->left());
2174 LOperand* right = UseOrConstantAtStart(instr->right());
2175 LStringAdd* string_add =
new(zone()) LStringAdd(context, left, right);
2176 return MarkAsCall(DefineFixed(string_add,
eax), instr);
2180 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2181 LOperand*
string = UseTempRegister(instr->string());
2182 LOperand* index = UseTempRegister(instr->index());
2183 LOperand* context = UseAny(instr->context());
2184 LStringCharCodeAt* result =
2185 new(zone()) LStringCharCodeAt(context,
string, index);
2186 return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2190 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2191 LOperand* char_code = UseRegister(instr->value());
2192 LOperand* context = UseAny(instr->context());
2193 LStringCharFromCode* result =
2194 new(zone()) LStringCharFromCode(context, char_code);
2195 return AssignPointerMap(DefineAsRegister(result));
2199 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2200 LOperand*
string = UseRegisterAtStart(instr->value());
2201 return DefineAsRegister(
new(zone()) LStringLength(
string));
2205 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2206 LOperand* context = UseFixed(instr->context(),
esi);
2207 LOperand* temp = TempRegister();
2208 LAllocateObject* result =
new(zone()) LAllocateObject(context, temp);
2209 return AssignPointerMap(DefineAsRegister(result));
2213 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2214 LOperand* context = UseFixed(instr->context(),
esi);
2216 DefineFixed(
new(zone()) LFastLiteral(context),
eax), instr);
2220 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2221 LOperand* context = UseFixed(instr->context(),
esi);
2223 DefineFixed(
new(zone()) LArrayLiteral(context),
eax), instr);
2227 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2228 LOperand* context = UseFixed(instr->context(),
esi);
2230 DefineFixed(
new(zone()) LObjectLiteral(context),
eax), instr);
2234 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2235 LOperand* context = UseFixed(instr->context(),
esi);
2237 DefineFixed(
new(zone()) LRegExpLiteral(context),
eax), instr);
2241 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2242 LOperand* context = UseFixed(instr->context(),
esi);
2244 DefineFixed(
new(zone()) LFunctionLiteral(context),
eax), instr);
2248 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2249 LOperand* context = UseFixed(instr->context(),
esi);
2250 LOperand*
object = UseAtStart(instr->object());
2251 LOperand* key = UseOrConstantAtStart(instr->key());
2252 LDeleteProperty* result =
new(zone()) LDeleteProperty(context,
object, key);
2253 return MarkAsCall(DefineFixed(result,
eax), instr);
2257 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2258 allocator_->MarkAsOsrEntry();
2259 current_block_->last_environment()->set_ast_id(instr->ast_id());
2260 return AssignEnvironment(
new(zone()) LOsrEntry);
2264 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2265 int spill_index = chunk()->GetParameterStackSlot(instr->index());
2266 return DefineAsSpilled(
new(zone()) LParameter, spill_index);
// Assigns a spill slot to an OSR value of unknown provenance.
// NOTE(review): the guard around the Abort call (presumably checking the
// spill index against the allocator's fixed-index limit) is missing from
// this extraction; confirm against the full source before editing.
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  int spill_index = chunk()->GetNextSpillIndex(
      false);
  // Gap: the if(...) wrapping this Abort is missing here.
  Abort(
      "Too many spill slots needed for OSR");
  return DefineAsSpilled(
      new(zone()) LUnknownOSRValue, spill_index);
2280 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2281 LOperand* context = UseFixed(instr->context(),
esi);
2282 argument_count_ -= instr->argument_count();
2283 LCallStub* result =
new(zone()) LCallStub(context);
2284 return MarkAsCall(DefineFixed(result,
eax), instr);
// NOTE(review): the function body is not visible in this extraction --
// presumably the arguments object is materialized lazily and no LIR is
// emitted here (NULL return).  Confirm against the full source.
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2297 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2298 LOperand* arguments = UseRegister(instr->arguments());
2299 LOperand* length = UseTempRegister(instr->length());
2300 LOperand* index = Use(instr->index());
2301 LAccessArgumentsAt* result =
2302 new(zone()) LAccessArgumentsAt(arguments, length, index);
2303 return AssignEnvironment(DefineAsRegister(result));
2307 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2308 LOperand*
object = UseFixed(instr->value(),
eax);
2309 LToFastProperties* result =
new(zone()) LToFastProperties(
object);
2310 return MarkAsCall(DefineFixed(result,
eax), instr);
2314 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2315 LOperand* context = UseFixed(instr->context(),
esi);
2316 LOperand* value = UseAtStart(instr->value());
2317 LTypeof* result =
new(zone()) LTypeof(context, value);
2318 return MarkAsCall(DefineFixed(result,
eax), instr);
2322 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2323 return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2327 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2328 HIsConstructCallAndBranch* instr) {
2329 return new(zone()) LIsConstructCallAndBranch(TempRegister());
// Records a simulated environment at a safepoint and, when a lazy
// deoptimization is pending, attaches the environment to it.
// NOTE(review): several lines are missing from this extraction (the else
// arm binding unnamed values, loop/if closing braces, the guard around the
// pending-deoptimization block, and the final return); confirm against the
// full source before editing.
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  HEnvironment* env = current_block_->last_environment();
  env->set_ast_id(instr->ast_id());
  env->Drop(instr->pop_count());
  for (
      int i = 0; i < instr->values()->length(); ++i) {
    HValue* value = instr->values()->at(i);
    if (instr->HasAssignedIndexAt(i)) {
      env->Bind(instr->GetAssignedIndexAt(i), value);
  // Gap: the else arm (pushing unassigned values) and the guard around the
  // pending-deoptimization handling are missing here.
  ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
  LLazyBailout* lazy_bailout =
      new(zone()) LLazyBailout;
  LInstruction* result = AssignEnvironment(lazy_bailout);
  instruction_pending_deoptimization_environment_->
      SetDeferredLazyDeoptimizationEnvironment(result->environment());
  instruction_pending_deoptimization_environment_ =
      NULL;
2368 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2369 if (instr->is_function_entry()) {
2370 LOperand* context = UseFixed(instr->context(),
esi);
2371 return MarkAsCall(
new(zone()) LStackCheck(context), instr);
2373 ASSERT(instr->is_backwards_branch());
2374 LOperand* context = UseAny(instr->context());
2375 return AssignEnvironment(
2376 AssignPointerMap(
new(zone()) LStackCheck(context)));
// Builds the inner environment when entering an inlined function.
// NOTE(review): several CopyForInlining arguments, closing braces, and the
// final return are missing from this extraction; confirm against the full
// source before editing.
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
      instr->arguments_count(),
      // Gap: intervening CopyForInlining arguments are missing here.
      instr->is_construct());
  if (instr->arguments_var() !=
      NULL) {
    inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
  // Gap: the closing brace of the if above is missing here.
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
// Pops the inlined environment when leaving an inlined function; may emit
// an LDrop for arguments that were pushed.
// NOTE(review): the condition guarding the arguments-pushed block, closing
// braces, and the final return are missing from this extraction; confirm
// against the full source before editing.
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop =
      NULL;
  HEnvironment* env = current_block_->last_environment();
  if (instr->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop =
        new(zone()) LDrop(argument_count);
    argument_count_ -= argument_count;
  // Gap: the closing brace of the if above is missing here.
  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(
          false);
  current_block_->UpdateEnvironment(outer);
2417 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2418 LOperand* context = UseFixed(instr->context(),
esi);
2419 LOperand* key = UseOrConstantAtStart(instr->key());
2420 LOperand*
object = UseOrConstantAtStart(instr->object());
2421 LIn* result =
new(zone()) LIn(context, key,
object);
2422 return MarkAsCall(DefineFixed(result,
eax), instr);
2426 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2427 LOperand* context = UseFixed(instr->context(),
esi);
2428 LOperand*
object = UseFixed(instr->enumerable(),
eax);
2429 LForInPrepareMap* result =
new(zone()) LForInPrepareMap(context,
object);
2430 return MarkAsCall(DefineFixed(result,
eax), instr, CAN_DEOPTIMIZE_EAGERLY);
2434 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2435 LOperand* map = UseRegister(instr->map());
2436 return AssignEnvironment(DefineAsRegister(
2437 new(zone()) LForInCacheArray(map)));
2441 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2442 LOperand* value = UseRegisterAtStart(instr->value());
2443 LOperand* map = UseRegisterAtStart(instr->map());
2444 return AssignEnvironment(
new(zone()) LCheckMapValue(value, map));
2448 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2449 LOperand*
object = UseRegister(instr->object());
2450 LOperand* index = UseTempRegister(instr->index());
2451 return DefineSameAsFirst(
new(zone()) LLoadFieldByIndex(
object, index));
2457 #endif // V8_TARGET_ARCH_IA32
HValue * LookupValue(int id) const
#define DEFINE_COMPILE(type)
static LUnallocated * cast(LOperand *op)
static LGap * cast(LInstruction *instr)
static LConstantOperand * Create(int index, Zone *zone)
Handle< Map > transitioned_map()
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
Handle< Object > name() const
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
int GetParameterStackSlot(int index) const
void PrintF(const char *format,...)
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
virtual void PrintDataTo(StringStream *stream)
int ParameterAt(int index)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
virtual void PrintDataTo(StringStream *stream)
LLabel(HBasicBlock *block)
Handle< String > name() const
static const int kNoNumber
static const int kNumAllocatableRegisters
static bool IsSupported(CpuFeature f)
Handle< Object > name() const
LEnvironment * environment() const
#define ASSERT(condition)
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
void PrintTo(StringStream *stream)
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Representation representation() const
EqualityKind kind() const
LGap * GetGapAt(int index) const
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
virtual bool HasResult() const =0
virtual void PrintDataTo(StringStream *stream)
int GetNextSpillIndex(bool is_double)
void PrintTo(StringStream *stream)
LLabel * replacement() const
virtual const char * Mnemonic() const
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
LOperand * GetNextSpillSlot(bool is_double)
void AddMove(LOperand *from, LOperand *to, Zone *zone)
static const char * String(Value tok)
static LDoubleStackSlot * Create(int index, Zone *zone)
LOperand * InputAt(int i)
virtual void PrintDataTo(StringStream *stream)
bool HasEnvironment() const
static void VPrint(const char *format, va_list args)
virtual void PrintDataTo(StringStream *stream)
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
static int ToAllocationIndex(Register reg)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
static LStackSlot * Create(int index, Zone *zone)
virtual void PrintDataTo(StringStream *stream)
static const int kMaxFixedIndex
bool IsGapAt(int index) const
LPointerMap * pointer_map() const
const ZoneList< HBasicBlock * > * blocks() const
static int ToAllocationIndex(XMMRegister reg)
virtual void PrintDataTo(StringStream *stream)
LLabel * GetLabel(int block_id) const
virtual DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,"string-compare-and-branch") Token void PrintDataTo(StringStream *stream)
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
virtual void PrintDataTo(StringStream *stream)
void PrintDataTo(StringStream *stream) const
virtual const char * Mnemonic() const
CompilationInfo * info() const
static const int kNumAllocatableRegisters
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
void AddGapMove(int index, LOperand *from, LOperand *to)
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)
Handle< String > name() const
LConstantOperand * DefineConstantOperand(HConstant *constant)
virtual void PrintDataTo(StringStream *stream)
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
bool HasPointerMap() const
int NearestGapPos(int index) const
virtual void PrintDataTo(StringStream *stream)
virtual int InputCount()=0
static HValue * cast(HValue *value)
Handle< String > type_literal()
void PrintTo(StringStream *stream)
Handle< Object > LookupLiteral(LConstantOperand *operand) const
virtual void PrintDataTo(StringStream *stream)
Handle< Map > original_map()
const ZoneList< LInstruction * > * instructions() const
virtual void PrintDataTo(StringStream *stream)
virtual void PrintDataTo(StringStream *stream)