30 #if defined(V8_TARGET_ARCH_IA32)
// CallWrapper that records a safepoint right after a generated call,
// using the pointer map and deopt mode captured at construction.
// NOTE(review): this extraction has dropped source lines throughout the
// file; only the visible members are documented.
44 class SafepointGenerator :
public CallWrapper {
47 LPointerMap* pointers,
48 Safepoint::DeoptMode mode)
// Nothing to emit before the call itself.
54 virtual void BeforeCall(
int call_size)
const {}
// After the call returns, record the safepoint for GC/deopt bookkeeping.
57 codegen_->RecordSafepoint(pointers_, deopt_mode_);
62 LPointerMap* pointers_;
63 Safepoint::DeoptMode deopt_mode_;
// Top-level driver: emits prologue, instruction bodies, deferred code and
// the safepoint table; returns false if generation aborted at any phase.
69 bool LCodeGen::GenerateCode() {
70 HPhase phase(
"Z_Code generation", chunk());
// SSE2 is assumed available for the whole generated code object.
73 CpuFeatures::Scope scope(
SSE2);
75 CodeStub::GenerateFPStubs();
// Frames are built/torn down manually, so mark the scope MANUAL.
80 FrameScope frame_scope(masm_, StackFrame::MANUAL);
// Dynamic alignment is only worthwhile with enough double slots and a
// non-recursive graph (condition partially visible here).
82 dynamic_frame_alignment_ = (chunk()->num_double_slots() > 2 &&
83 !chunk()->graph()->is_recursive()) ||
86 return GeneratePrologue() &&
88 GenerateDeferredCode() &&
89 GenerateSafepointTable();
// Stamp generation results (stack slots, safepoint table offset,
// deoptimization data) onto the finished Code object.
93 void LCodeGen::FinishCode(Handle<Code> code) {
95 code->set_stack_slots(GetStackSlotCount());
96 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
97 PopulateDeoptimizationData(code);
// Abandon code generation; when --trace-bailout is set, print the
// function name and the printf-style reason.
102 void LCodeGen::Abort(
const char* format, ...) {
103 if (FLAG_trace_bailout) {
104 SmartArrayPointer<char>
name(
105 info()->shared_info()->DebugName()->
ToCString());
106 PrintF(
"Aborting LCodeGen in @\"%s\": ", *
name);
// Varargs are forwarded to the trace output (continuation not visible).
108 va_start(arguments, format);
// Emit a printf-style assembler comment, but only when --code-comments
// is enabled; the formatted text is copied so it outlives this call.
117 void LCodeGen::Comment(
const char* format, ...) {
118 if (!FLAG_code_comments)
return;
120 StringBuilder builder(buffer,
ARRAY_SIZE(buffer));
122 va_start(arguments, format);
123 builder.AddFormattedList(format, arguments);
128 size_t length = builder.position();
// Copy into a heap buffer because RecordComment keeps the pointer.
130 memcpy(copy.start(), builder.Finalize(), copy.length());
131 masm()->RecordComment(copy.start());
// Emit the function prologue: optional --stop-at breakpoint, sloppy-mode
// receiver patching, optional dynamic stack alignment, stack-slot
// reservation, and (if needed) allocation of a local context.
// NOTE(review): many lines of this function were dropped by the
// extraction; comments only describe the visible statements.
135 bool LCodeGen::GeneratePrologue() {
// --stop-at support: break when entering the named function.
139 if (strlen(FLAG_stop_at) > 0 &&
140 info_->function()->name()->IsEqualTo(
CStrVector(FLAG_stop_at))) {
// In classic (sloppy) mode the receiver may need to be replaced with
// undefined for natives / strict callees (exact trigger not fully visible).
149 if (!info_->is_classic_mode() || info_->is_native()) {
152 __ j(
zero, &ok, Label::kNear);
// Receiver lives above the saved parameters on the caller frame.
154 int receiver_offset = (scope()->num_parameters() + 1) *
kPointerSize;
155 __ mov(Operand(
esp, receiver_offset),
156 Immediate(isolate()->factory()->undefined_value()));
// Optionally insert padding so doubles on the stack are aligned.
161 if (dynamic_frame_alignment_) {
165 Label do_not_pad, align_loop;
170 __ push(Immediate(0));
174 __ mov(
ecx, Immediate(scope()->num_parameters() + 2));
176 __ bind(&align_loop);
183 __ bind(&do_not_pad);
// Debug check that the frame really ended up aligned.
191 if (dynamic_frame_alignment_ && FLAG_debug_code) {
193 __ Assert(
zero,
"frame is expected to be aligned");
// Reserve space for spilled values.
197 int slots = GetStackSlotCount();
200 if (dynamic_frame_alignment_) {
206 if (FLAG_debug_code) {
207 __ mov(Operand(
eax), Immediate(slots));
// Touch the reserved area one page at a time (stack-probe pattern;
// loop header not visible here).
219 const int kPageSize = 4 *
KB;
222 offset -= kPageSize) {
229 if (dynamic_frame_alignment_) {
// Allocate a function context when the function has heap slots.
242 if (heap_slots > 0) {
243 Comment(
";;; Allocate local context");
247 FastNewContextStub stub(heap_slots);
250 __ CallRuntime(Runtime::kNewFunctionContext, 1);
252 RecordSafepoint(Safepoint::kNoLazyDeopt);
// Copy context-allocated parameters into the new context.
258 int num_parameters = scope()->num_parameters();
259 for (
int i = 0; i < num_parameters; i++) {
260 Variable* var = scope()->parameter(i);
261 if (var->IsContextSlot()) {
265 __ mov(
eax, Operand(
ebp, parameter_offset));
268 __ mov(Operand(
esi, context_offset),
eax);
// Write barrier for the context store.
270 __ RecordWriteContextSlot(
esi,
277 Comment(
";;; End allocate local context");
// --trace: announce function entry at runtime.
284 __ CallRuntime(Runtime::kTraceEnter, 0);
286 return !is_aborted();
// Walk the instruction list in order and emit native code for each
// instruction; labels toggle emission off when they have a replacement.
290 bool LCodeGen::GenerateBody() {
292 bool emit_instructions =
true;
293 for (current_instruction_ = 0;
294 !is_aborted() && current_instruction_ < instructions_->length();
295 current_instruction_++) {
296 LInstruction* instr = instructions_->at(current_instruction_);
297 if (instr->IsLabel()) {
// A replaced label means the following block was optimized away.
299 emit_instructions = !label->HasReplacement();
302 if (emit_instructions) {
303 Comment(
";;; @%d: %s.", current_instruction_, instr->Mnemonic());
304 instr->CompileToNative(
this);
307 EnsureSpaceForLazyDeopt();
308 return !is_aborted();
// Emit all out-of-line (deferred) code stubs collected during the main
// pass, each bracketed by its entry label and a jump back to its exit.
312 bool LCodeGen::GenerateDeferredCode() {
314 if (deferred_.length() > 0) {
315 for (
int i = 0; !is_aborted() && i < deferred_.length(); i++) {
316 LDeferredCode* code = deferred_[i];
317 __ bind(code->entry());
318 Comment(
";;; Deferred code @%d: %s.",
319 code->instruction_index(),
320 code->instr()->Mnemonic());
// Resume the fast path after the deferred work.
322 __ jmp(code->exit());
328 if (!is_aborted()) status_ =
DONE;
329 return !is_aborted();
// Emit the accumulated safepoint table at the end of the code object.
333 bool LCodeGen::GenerateSafepointTable() {
335 safepoints_.Emit(masm(), GetStackSlotCount());
336 return !is_aborted();
// Map an allocation index to its XMM register (body not visible here).
345 XMMRegister LCodeGen::ToDoubleRegister(
int index)
const {
// Resolve an LOperand that must be a double register to its XMM register.
356 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op)
const {
357 ASSERT(op->IsDoubleRegister());
358 return ToDoubleRegister(op->index());
// Fetch the int32 value of a constant operand; asserts that the literal
// has integer-32 representation and round-trips through double exactly.
362 int LCodeGen::ToInteger32(LConstantOperand* op)
const {
363 Handle<Object> value = chunk_->LookupLiteral(op);
364 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
365 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
367 return static_cast<int32_t>(value->Number());
// Look up the tagged literal behind a constant operand.
371 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op)
const {
372 Handle<Object> literal = chunk_->LookupLiteral(op);
373 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
// Numeric value of a constant operand as a double.
378 double LCodeGen::ToDouble(LConstantOperand* op)
const {
379 Handle<Object> value = chunk_->LookupLiteral(op);
380 return value->Number();
// True when the constant's chosen representation is integer-32.
384 bool LCodeGen::IsInteger32(LConstantOperand* op)
const {
385 return chunk_->LookupLiteralRepresentation(op).IsInteger32();
// Convert any LOperand (register or stack slot) into an assembler Operand.
389 Operand LCodeGen::ToOperand(LOperand* op)
const {
390 if (op->IsRegister())
return Operand(
ToRegister(op));
391 if (op->IsDoubleRegister())
return Operand(ToDoubleRegister(op));
// Remaining cases are stack slots addressed via the frame.
392 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
393 int index = op->index();
// Operand addressing the high half of a double stack slot; the offset
// formula differs for incoming (negative-index) vs. spill slots.
405 Operand LCodeGen::HighOperand(LOperand* op) {
406 ASSERT(op->IsDoubleStackSlot());
407 int index = op->index();
408 int offset = (index >= 0) ? index + 3 : index - 1;
// Recursively serialize an environment chain into a deopt Translation:
// outer frames first, then this frame's kind marker and each value,
// preferring spilled copies when a value was pushed for a safepoint.
413 void LCodeGen::WriteTranslation(LEnvironment* environment,
414 Translation* translation) {
// Recursion terminates at the outermost frame.
415 if (environment ==
NULL)
return;
418 int translation_size = environment->values()->length();
420 int height = translation_size - environment->parameter_count();
422 WriteTranslation(environment->outer(), translation);
423 int closure_id = DefineDeoptimizationLiteral(environment->closure());
// Frame kind decides which Begin*Frame record is written.
424 switch (environment->frame_type()) {
426 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
429 translation->BeginConstructStubFrame(closure_id, translation_size);
432 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
437 for (
int i = 0; i < translation_size; ++i) {
438 LOperand* value = environment->values()->at(i);
// If the register was spilled around a call, record the spill slot as
// a duplicate of the live value.
441 if (environment->spilled_registers() !=
NULL && value !=
NULL) {
442 if (value->IsRegister() &&
443 environment->spilled_registers()[value->index()] !=
NULL) {
444 translation->MarkDuplicate();
445 AddToTranslation(translation,
446 environment->spilled_registers()[value->index()],
447 environment->HasTaggedValueAt(i));
449 value->IsDoubleRegister() &&
450 environment->spilled_double_registers()[value->index()] !=
NULL) {
451 translation->MarkDuplicate();
454 environment->spilled_double_registers()[value->index()],
// Default: translate the value itself.
459 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
// Append one LOperand to a deopt translation, dispatching on the
// operand kind (stack slot, register, constant, arguments object).
// NOTE(review): several branch conditions were dropped by the extraction.
464 void LCodeGen::AddToTranslation(Translation* translation,
471 translation->StoreArgumentsObject();
472 }
else if (op->IsStackSlot()) {
474 translation->StoreStackSlot(op->index());
// Untagged int32 slot variant (guard not visible).
476 translation->StoreInt32StackSlot(op->index());
478 }
else if (op->IsDoubleStackSlot()) {
479 translation->StoreDoubleStackSlot(op->index());
480 }
else if (op->IsArgument()) {
// Arguments live above the normal spill area.
482 int src_index = GetStackSlotCount() + op->index();
483 translation->StoreStackSlot(src_index);
484 }
else if (op->IsRegister()) {
487 translation->StoreRegister(reg);
489 translation->StoreInt32Register(reg);
491 }
else if (op->IsDoubleRegister()) {
492 XMMRegister reg = ToDoubleRegister(op);
493 translation->StoreDoubleRegister(reg);
494 }
else if (op->IsConstantOperand()) {
// Constants are stored as indices into the literal array.
496 int src_index = DefineDeoptimizationLiteral(literal);
497 translation->StoreLiteral(src_index);
// Emit a call to a code object, recording source position before and a
// (possibly lazy-deopt) safepoint after the call.
504 void LCodeGen::CallCodeGeneric(Handle<Code> code,
505 RelocInfo::Mode mode,
507 SafepointMode safepoint_mode) {
509 LPointerMap* pointers = instr->pointer_map();
510 RecordPosition(pointers->position());
512 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
// Convenience wrapper: call code with a simple safepoint.
523 void LCodeGen::CallCode(Handle<Code> code,
524 RelocInfo::Mode mode,
525 LInstruction* instr) {
526 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
// Call a runtime function for an instruction that has a pointer map;
// records position before and a lazy-deopt safepoint after the call.
530 void LCodeGen::CallRuntime(
const Runtime::Function* fun,
532 LInstruction* instr) {
534 ASSERT(instr->HasPointerMap());
535 LPointerMap* pointers = instr->pointer_map();
536 RecordPosition(pointers->position());
538 __ CallRuntime(fun, argc);
540 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
// NOTE(review): fragment — the enclosing function's signature was
// dropped by the extraction; presumably this is the deferred-code
// runtime-call helper (loads esi context, calls runtime saving doubles).
548 if (context->IsRegister()) {
552 }
else if (context->IsStackSlot()) {
553 __ mov(
esi, ToOperand(context));
554 }
else if (context->IsConstantOperand()) {
555 Handle<Object> literal =
557 __ LoadHeapObject(
esi, Handle<Context>::cast(literal));
// Double registers must be preserved across this runtime call.
562 __ CallRuntimeSaveDoubles(
id);
563 RecordSafepointWithRegisters(
564 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
// Lazily build and register the deopt translation for an environment;
// idempotent thanks to the HasBeenRegistered guard.
568 void LCodeGen::RegisterEnvironmentForDeoptimization(
569 LEnvironment* environment, Safepoint::DeoptMode mode) {
570 if (!environment->HasBeenRegistered()) {
// Count JS frames along the environment chain (loop body not visible).
585 int jsframe_count = 0;
586 for (LEnvironment* e = environment; e !=
NULL; e = e->outer()) {
592 Translation translation(&translations_, frame_count, jsframe_count,
594 WriteTranslation(environment, &translation);
595 int deoptimization_index = deoptimizations_.length();
596 int pc_offset = masm()->pc_offset();
// Only lazy deopts need the pc offset; others record -1.
597 environment->Register(deoptimization_index,
599 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
600 deoptimizations_.Add(environment, zone());
// Emit a conditional deoptimization: register the environment, then
// branch to the deopt entry when `cc` holds. Supports the
// --deopt-every-n-times and --trap-on-deopt debugging flags.
605 void LCodeGen::DeoptimizeIf(
Condition cc, LEnvironment* environment) {
606 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
607 ASSERT(environment->HasBeenRegistered());
608 int id = environment->deoptimization_index();
// A missing deopt entry means table setup failed earlier.
611 Abort(
"bailout was not prepared");
615 if (FLAG_deopt_every_n_times != 0) {
616 Handle<SharedFunctionInfo> shared(info_->shared_info());
// Debug builds can trap (int3) instead of silently deopting.
626 if (FLAG_trap_on_deopt)
__ int3();
644 if (FLAG_trap_on_deopt)
__ int3();
647 if (FLAG_trap_on_deopt) {
// Build the DeoptimizationInputData array (translations, literals,
// per-entry indices) and attach it to the code object.
660 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
661 int length = deoptimizations_.length();
// Nothing to attach when no deopt points were registered.
662 if (length == 0)
return;
663 Handle<DeoptimizationInputData> data =
664 factory()->NewDeoptimizationInputData(length,
TENURED);
666 Handle<ByteArray> translations = translations_.CreateByteArray();
667 data->SetTranslationByteArray(*translations);
668 data->SetInlinedFunctionCount(
Smi::FromInt(inlined_function_count_));
// Copy collected literals into a fixed array owned by the data.
670 Handle<FixedArray> literals =
671 factory()->NewFixedArray(deoptimization_literals_.length(),
TENURED);
672 for (
int i = 0; i < deoptimization_literals_.length(); i++) {
673 literals->set(i, *deoptimization_literals_[i]);
675 data->SetLiteralArray(*literals);
// One entry per registered environment.
681 for (
int i = 0; i < length; i++) {
682 LEnvironment* env = deoptimizations_[i];
684 data->SetTranslationIndex(i,
Smi::FromInt(env->translation_index()));
685 data->SetArgumentsStackHeight(i,
689 code->set_deoptimization_data(*data);
// Intern a literal in the deopt literal list, returning its index;
// reuses an existing slot when the same handle is already present.
693 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
694 int result = deoptimization_literals_.length();
695 for (
int i = 0; i < deoptimization_literals_.length(); ++i) {
696 if (deoptimization_literals_[i].is_identical_to(literal))
return i;
698 deoptimization_literals_.Add(literal, zone());
// Seed the deopt literal list with all inlined closures so their
// indices are stable; must run before any other literal is defined.
703 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
704 ASSERT(deoptimization_literals_.length() == 0);
706 const ZoneList<Handle<JSFunction> >* inlined_closures =
707 chunk()->inlined_closures();
709 for (
int i = 0, length = inlined_closures->length();
712 DefineDeoptimizationLiteral(inlined_closures->at(i));
715 inlined_function_count_ = deoptimization_literals_.length();
// Record a lazy-deopt safepoint, either simple or with registers,
// according to the requested safepoint mode.
719 void LCodeGen::RecordSafepointWithLazyDeopt(
720 LInstruction* instr, SafepointMode safepoint_mode) {
721 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
722 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
724 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
725 RecordSafepointWithRegisters(
726 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
// Core safepoint recorder: defines the safepoint and registers every
// pointer-holding stack slot (and register, when kind allows) from the
// normalized pointer map.
731 void LCodeGen::RecordSafepoint(
732 LPointerMap* pointers,
733 Safepoint::Kind kind,
735 Safepoint::DeoptMode deopt_mode) {
736 ASSERT(kind == expected_safepoint_kind_);
737 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
738 Safepoint safepoint =
739 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
740 for (
int i = 0; i < operands->length(); i++) {
741 LOperand* pointer = operands->at(i);
742 if (pointer->IsStackSlot()) {
743 safepoint.DefinePointerSlot(pointer->index(), zone());
// Registers only matter for kWithRegisters safepoints.
744 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
745 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
// Simple safepoint: no registers, zero arguments.
751 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
752 Safepoint::DeoptMode mode) {
753 RecordSafepoint(pointers, Safepoint::kSimple, 0, mode);
// Safepoint with an empty pointer map (no live pointers to record).
757 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
758 LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
759 RecordSafepoint(&empty_pointers, mode);
// Safepoint that additionally tracks pointer values held in registers.
763 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
765 Safepoint::DeoptMode mode) {
766 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode);
// Forward a source position to the assembler's position recorder,
// ignoring the kNoPosition sentinel.
770 void LCodeGen::RecordPosition(
int position) {
771 if (position == RelocInfo::kNoPosition)
return;
772 masm()->positions_recorder()->RecordPosition(position);
// Bind a basic-block label, emitting a code comment that flags loop
// headers, and track the current block id.
776 void LCodeGen::DoLabel(LLabel* label) {
777 if (label->is_loop_header()) {
778 Comment(
";;; B%d - LOOP entry", label->block_id());
780 Comment(
";;; B%d", label->block_id());
782 __ bind(label->label());
783 current_block_ = label->block_id();
// Delegate gap moves to the move resolver.
788 void LCodeGen::DoParallelMove(LParallelMove* move) {
789 resolver_.Resolve(move);
// Emit the parallel moves attached to a gap (positions iterated in a
// loop whose header is not visible here).
793 void LCodeGen::DoGap(LGap* gap) {
798 LParallelMove* move = gap->GetParallelMove(inner_pos);
799 if (move !=
NULL) DoParallelMove(move);
// Instruction gaps are handled like ordinary gaps (body not visible).
804 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
// Parameters need no code; they are already in their slots.
809 void LCodeGen::DoParameter(LParameter* instr) {
// Dispatch on the hydrogen stub's major key and call the matching
// code stub via CallCode.
814 void LCodeGen::DoCallStub(LCallStub* instr) {
817 switch (instr->hydrogen()->major_key()) {
818 case CodeStub::RegExpConstructResult: {
819 RegExpConstructResultStub stub;
820 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
823 case CodeStub::RegExpExec: {
825 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// (stub construction for this case not visible)
830 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
833 case CodeStub::NumberToString: {
834 NumberToStringStub stub;
835 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
838 case CodeStub::StringAdd: {
840 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
843 case CodeStub::StringCompare: {
844 StringCompareStub stub;
845 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
848 case CodeStub::TranscendentalCache: {
849 TranscendentalCacheStub stub(instr->transcendental_type(),
851 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// OSR values are materialized elsewhere; nothing to emit (body not visible).
860 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
// Integer modulus. Fast path for power-of-two divisors via masking;
// otherwise a sequence of shortcut tests (zero dividend, dividend <
// divisor, power-of-two check, repeated subtraction) before falling
// back to the slow path. NOTE(review): lines dropped by extraction.
865 void LCodeGen::DoModI(LModI* instr) {
866 if (instr->hydrogen()->HasPowerOf2Divisor()) {
867 Register dividend =
ToRegister(instr->InputAt(0));
// Work with |divisor|; sign of result follows the dividend.
872 if (divisor < 0) divisor = -divisor;
874 Label positive_dividend, done;
875 __ test(dividend, Operand(dividend));
876 __ j(
not_sign, &positive_dividend, Label::kNear);
// Negative dividend: mask then restore sign (restore not visible).
878 __ and_(dividend, divisor - 1);
884 __ jmp(&done, Label::kNear);
886 __ bind(&positive_dividend);
887 __ and_(dividend, divisor - 1);
// General case.
890 Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
891 Register left_reg =
ToRegister(instr->InputAt(0));
892 Register right_reg =
ToRegister(instr->InputAt(1));
893 Register result_reg =
ToRegister(instr->result());
// Division by zero deopts.
902 __ test(right_reg, Operand(right_reg));
903 DeoptimizeIf(
zero, instr->environment());
// 0 % x == 0; negative dividends take the slow path.
906 __ test(left_reg, Operand(left_reg));
907 __ j(
zero, &remainder_eq_dividend, Label::kNear);
908 __ j(
sign, &slow, Label::kNear);
910 __ test(right_reg, Operand(right_reg));
915 __ bind(&both_positive);
// dividend < divisor means the dividend already is the remainder.
918 __ cmp(left_reg, Operand(right_reg));
919 __ j(
less, &remainder_eq_dividend, Label::kNear);
// Power-of-two divisor detected at runtime: (d & (d-1)) == 0 → mask.
922 Register scratch =
ToRegister(instr->TempAt(0));
923 __ mov(scratch, right_reg);
924 __ sub(Operand(scratch), Immediate(1));
925 __ test(scratch, Operand(right_reg));
926 __ j(
not_zero, &do_subtraction, Label::kNear);
927 __ and_(left_reg, Operand(scratch));
928 __ jmp(&remainder_eq_dividend, Label::kNear);
// Try a few subtractions before resorting to idiv.
930 __ bind(&do_subtraction);
931 const int kUnfolds = 3;
933 __ mov(scratch, left_reg);
934 for (
int i = 0; i < kUnfolds; i++) {
936 __ sub(left_reg, Operand(right_reg));
938 __ cmp(left_reg, Operand(right_reg));
939 __ j(
less, &remainder_eq_dividend, Label::kNear);
941 __ mov(left_reg, scratch);
// Slow path / sign handling (several lines not visible).
952 __ test(left_reg, Operand(left_reg));
957 __ test(result_reg, Operand(result_reg));
961 __ bind(&positive_left);
967 __ jmp(&done, Label::kNear);
969 __ bind(&remainder_eq_dividend);
970 __ mov(result_reg, left_reg);
// Integer division using eax/edx; deopts on division by zero, on
// -0 results, on kMinInt / -1 overflow, and on non-zero remainders
// (truncation check). NOTE(review): lines dropped by extraction.
977 void LCodeGen::DoDivI(LDivI* instr) {
978 LOperand* right = instr->InputAt(1);
984 Register left_reg =
eax;
// Divide by zero → deopt.
989 __ test(right_reg, ToOperand(right));
990 DeoptimizeIf(
zero, instr->environment());
// 0 / negative would produce -0 → deopt.
996 __ test(left_reg, Operand(left_reg));
998 __ test(right_reg, ToOperand(right));
999 DeoptimizeIf(
sign, instr->environment());
1000 __ bind(&left_not_zero);
// kMinInt / -1 overflows idiv → deopt.
1005 Label left_not_min_int;
1007 __ j(
not_zero, &left_not_min_int, Label::kNear);
1008 __ cmp(right_reg, -1);
1009 DeoptimizeIf(
zero, instr->environment());
1010 __ bind(&left_not_min_int);
// Non-zero remainder means result is not an integer → deopt.
1019 DeoptimizeIf(
not_zero, instr->environment());
// Integer multiply with constant strength reduction (0, 2, 3, 5, 9 via
// lea, general imul otherwise), overflow deopt, and -0 detection.
// NOTE(review): lines dropped by extraction.
1023 void LCodeGen::DoMulI(LMulI* instr) {
1024 Register left =
ToRegister(instr->InputAt(0));
1025 LOperand* right = instr->InputAt(1);
1031 if (right->IsConstantOperand()) {
// Strength-reduce small constants.
1036 if (constant == -1) {
1038 }
else if (constant == 0) {
1039 __ xor_(left, Operand(left));
1040 }
else if (constant == 2) {
1041 __ add(left, Operand(left));
// x*3, x*5, x*9 via scaled lea.
1051 __ lea(left, Operand(left, left,
times_2, 0));
1057 __ lea(left, Operand(left, left,
times_4, 0));
1063 __ lea(left, Operand(left, left,
times_8, 0));
1069 __ imul(left, left, constant);
1073 __ imul(left, left, constant);
// Non-constant right operand.
1076 __ imul(left, ToOperand(right));
1080 DeoptimizeIf(
overflow, instr->environment());
// Zero result: deopt if operand signs imply -0.
1086 __ test(left, Operand(left));
1088 if (right->IsConstantOperand()) {
1094 __ or_(
ToRegister(instr->TempAt(0)), ToOperand(right));
1095 DeoptimizeIf(
sign, instr->environment());
// Bitwise AND/OR/XOR; left operand doubles as the result. Constant and
// register right-hand sides take separate switches (op emission lines
// mostly not visible in this extraction).
1102 void LCodeGen::DoBitI(LBitI* instr) {
1103 LOperand* left = instr->InputAt(0);
1104 LOperand* right = instr->InputAt(1);
1105 ASSERT(left->Equals(instr->result()));
1106 ASSERT(left->IsRegister());
1108 if (right->IsConstantOperand()) {
1110 switch (instr->op()) {
1111 case Token::BIT_AND:
1117 case Token::BIT_XOR:
// Register/operand right-hand side.
1125 switch (instr->op()) {
1126 case Token::BIT_AND:
1132 case Token::BIT_XOR:
// Shift operations (SAR/SHR/SHL) with the count either in a register
// (ecx-style) or as a 5-bit masked constant; logical shift right of a
// negative value can deopt when the result must stay int32.
1143 void LCodeGen::DoShiftI(LShiftI* instr) {
1144 LOperand* left = instr->InputAt(0);
1145 LOperand* right = instr->InputAt(1);
1146 ASSERT(left->Equals(instr->result()));
1147 ASSERT(left->IsRegister());
1148 if (right->IsRegister()) {
1151 switch (instr->op()) {
// SHR result with the top bit set is not an int32 → deopt.
1157 if (instr->can_deopt()) {
1159 DeoptimizeIf(
not_zero, instr->environment());
// Constant shift count, masked to 0..31 per x86 semantics.
1171 uint8_t shift_count =
static_cast<uint8_t
>(value & 0x1F);
1172 switch (instr->op()) {
1174 if (shift_count != 0) {
// shift_count 0 on SHR still needs the sign check.
1179 if (shift_count == 0 && instr->can_deopt()) {
1181 DeoptimizeIf(
not_zero, instr->environment());
1187 if (shift_count != 0) {
// Integer subtraction in place on the left operand; deopts on overflow.
1199 void LCodeGen::DoSubI(LSubI* instr) {
1200 LOperand* left = instr->InputAt(0);
1201 LOperand* right = instr->InputAt(1);
1202 ASSERT(left->Equals(instr->result()));
1204 if (right->IsConstantOperand()) {
1205 __ sub(ToOperand(left), ToInteger32Immediate(right));
1210 DeoptimizeIf(
overflow, instr->environment());
// Materialize an int32 constant into the result register.
1215 void LCodeGen::DoConstantI(LConstantI* instr) {
1216 ASSERT(instr->result()->IsRegister());
1217 __ Set(
ToRegister(instr->result()), Immediate(instr->value()));
// Materialize a double constant in an XMM register: xorps for +0.0,
// otherwise build the 64-bit pattern from its halves using pinsrd when
// SSE4.1 is available, else movd plus shuffling through xmm0.
1221 void LCodeGen::DoConstantD(LConstantD* instr) {
1222 ASSERT(instr->result()->IsDoubleRegister());
1223 XMMRegister res = ToDoubleRegister(instr->result());
1224 double v = instr->value();
// Bit pattern zero (i.e. +0.0) has a cheaper encoding.
1227 if (BitCast<uint64_t, double>(v) == 0) {
1230 Register temp =
ToRegister(instr->TempAt(0));
1231 uint64_t int_val = BitCast<uint64_t, double>(v);
// SSE4.1 path: insert the upper dword directly.
1235 CpuFeatures::Scope scope(
SSE4_1);
1237 __ Set(temp, Immediate(lower));
1238 __ movd(res, Operand(temp));
1239 __ Set(temp, Immediate(upper));
1240 __ pinsrd(res, Operand(temp), 1);
1243 __ Set(temp, Immediate(upper));
1244 __ pinsrd(res, Operand(temp), 1);
// Pre-SSE4.1 fallback combining halves via xmm0.
1247 __ Set(temp, Immediate(upper));
1248 __ movd(res, Operand(temp));
1251 __ Set(temp, Immediate(lower));
1252 __ movd(
xmm0, Operand(temp));
// Materialize a tagged constant: heap objects go through LoadHeapObject
// (relocatable), smis as plain immediates.
1260 void LCodeGen::DoConstantT(LConstantT* instr) {
1262 Handle<Object> handle = instr->value();
1263 if (handle->IsHeapObject()) {
1264 __ LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
1266 __ Set(reg, Immediate(handle));
// Load a JSArray's length field (actual load not visible here).
1271 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1272 Register result =
ToRegister(instr->result());
1273 Register array =
ToRegister(instr->InputAt(0));
// Load a FixedArrayBase's length field (actual load not visible here).
1278 void LCodeGen::DoFixedArrayBaseLength(
1279 LFixedArrayBaseLength* instr) {
1280 Register result =
ToRegister(instr->result());
1281 Register array =
ToRegister(instr->InputAt(0));
// Extract the elements-kind bits from the input's map (extraction of
// the bit field not visible here).
1286 void LCodeGen::DoElementsKind(LElementsKind* instr) {
1287 Register result =
ToRegister(instr->result());
1288 Register input =
ToRegister(instr->InputAt(0));
// ValueOf: smis are returned as-is; wrapper objects presumably have
// their value field loaded (that branch not visible here).
1301 void LCodeGen::DoValueOf(LValueOf* instr) {
1302 Register input =
ToRegister(instr->InputAt(0));
1303 Register result =
ToRegister(instr->result());
1305 ASSERT(input.is(result));
// Smis already are their own value.
1309 __ JumpIfSmi(input, &done, Label::kNear);
// Load a field of a JSDate. Field 0 is read directly; cached fields are
// valid only while the date-cache stamp matches, otherwise the value is
// fetched via the C runtime. NOTE(review): lines dropped by extraction.
1320 void LCodeGen::DoDateField(LDateField* instr) {
1321 Register
object =
ToRegister(instr->InputAt(0));
1322 Register result =
ToRegister(instr->result());
1323 Register scratch =
ToRegister(instr->TempAt(0));
1324 Smi* index = instr->index();
1325 Label runtime, done;
1326 ASSERT(
object.is(result));
// Debug-mode type checks: must be a non-smi JSDate.
1330 __ AbortIfSmi(
object);
1332 __ Assert(
equal,
"Trying to get date field from non-date.");
// Field 0 (the time value) needs no cache check.
1335 if (index->value() == 0) {
// Compare against the isolate's date cache stamp.
1339 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1340 __ mov(scratch, Operand::StaticVariable(stamp));
// Slow path: ask the runtime for the field.
1348 __ PrepareCallCFunction(2, scratch);
1349 __ mov(Operand(
esp, 0),
object);
1351 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
// Bitwise NOT in place (the not instruction itself is not visible here).
1357 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1358 LOperand* input = instr->InputAt(0);
1359 ASSERT(input->Equals(instr->result()));
// Push the value and call Runtime::kThrow; control never returns, so
// anything after is marked unreachable in debug builds.
1364 void LCodeGen::DoThrow(LThrow* instr) {
1365 __ push(ToOperand(instr->value()));
1367 CallRuntime(Runtime::kThrow, 1, instr);
1369 if (FLAG_debug_code) {
1370 Comment(
"Unreachable code.");
// Integer addition in place on the left operand; deopts on overflow.
1376 void LCodeGen::DoAddI(LAddI* instr) {
1377 LOperand* left = instr->InputAt(0);
1378 LOperand* right = instr->InputAt(1);
1379 ASSERT(left->Equals(instr->result()));
1381 if (right->IsConstantOperand()) {
1382 __ add(ToOperand(left), ToInteger32Immediate(right));
1388 DeoptimizeIf(
overflow, instr->environment());
// Double arithmetic on XMM registers; MOD goes through the C runtime
// (result returned on the x87 stack, spilled to memory, reloaded).
1393 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1394 XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1395 XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1396 XMMRegister result = ToDoubleRegister(instr->result());
// For everything but MOD the left register is also the result.
1398 ASSERT(instr->op() == Token::MOD || left.is(result));
1399 switch (instr->op()) {
1401 __ addsd(left, right);
1404 __ subsd(left, right);
1407 __ mulsd(left, right);
1410 __ divsd(left, right);
// MOD: call the C library via double_fp_operation.
1414 __ PrepareCallCFunction(4,
eax);
1418 ExternalReference::double_fp_operation(Token::MOD, isolate()),
// The C call returns on the FP stack; move to the XMM result.
1424 __ fstp_d(Operand(
esp, 0));
1425 __ movdbl(result, Operand(
esp, 0));
// Generic (tagged) binary arithmetic via a binary-op stub.
1436 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1443 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// Find the next block after `block` whose label was actually emitted
// (i.e. has no replacement).
1448 int LCodeGen::GetNextEmittedBlock(
int block) {
1449 for (
int i = block + 1; i < graph()->blocks()->length(); ++i) {
1450 LLabel* label = chunk_->GetLabel(i);
1451 if (!label->HasReplacement())
return i;
// Emit a conditional branch to the two destination blocks, eliding
// jumps to whichever target is the fall-through block.
1457 void LCodeGen::EmitBranch(
int left_block,
int right_block,
Condition cc) {
1458 int next_block = GetNextEmittedBlock(current_block_);
1459 right_block = chunk_->LookupDestination(right_block);
1460 left_block = chunk_->LookupDestination(left_block);
// Same destination either way: unconditional goto.
1462 if (right_block == left_block) {
1463 EmitGoto(left_block);
1464 }
else if (left_block == next_block) {
// (inverted branch to right_block; emission line not visible)
1466 }
else if (right_block == next_block) {
1467 __ j(cc, chunk_->GetAssemblyLabel(left_block));
// Neither target falls through: branch then jump.
1469 __ j(cc, chunk_->GetAssemblyLabel(left_block));
1470 __ jmp(chunk_->GetAssemblyLabel(right_block));
// Branch on a value's truthiness. Int32 and double inputs get direct
// tests; tagged inputs use type feedback (expected_input_types) to test
// only the possible ToBoolean cases, deopting when the map check is
// required but unexpected. NOTE(review): lines dropped by extraction.
1475 void LCodeGen::DoBranch(LBranch* instr) {
1476 int true_block = chunk_->LookupDestination(instr->true_block_id());
1477 int false_block = chunk_->LookupDestination(instr->false_block_id());
1479 Representation r = instr->hydrogen()->value()->representation();
1480 if (r.IsInteger32()) {
// int32: non-zero is true.
1481 Register reg =
ToRegister(instr->InputAt(0));
1482 __ test(reg, Operand(reg));
1483 EmitBranch(true_block, false_block,
not_zero);
1484 }
else if (r.IsDouble()) {
1485 XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
1488 EmitBranch(true_block, false_block,
not_equal);
// Tagged value.
1491 Register reg =
ToRegister(instr->InputAt(0));
1492 HType
type = instr->hydrogen()->value()->type();
1493 if (type.IsBoolean()) {
1494 __ cmp(reg, factory()->true_value());
1495 EmitBranch(true_block, false_block,
equal);
1496 }
else if (type.IsSmi()) {
1497 __ test(reg, Operand(reg));
1498 EmitBranch(true_block, false_block,
not_equal);
1500 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1501 Label* false_label = chunk_->GetAssemblyLabel(false_block);
// Only test the cases type feedback says can occur.
1503 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1509 __ cmp(reg, factory()->undefined_value());
1514 __ cmp(reg, factory()->true_value());
1517 __ cmp(reg, factory()->false_value());
1522 __ cmp(reg, factory()->null_value());
1528 __ test(reg, Operand(reg));
1530 __ JumpIfSmi(reg, true_label);
1531 }
else if (expected.NeedsMap()) {
// A smi here contradicts the feedback → deopt.
1534 DeoptimizeIf(
zero, instr->environment());
1538 if (expected.NeedsMap()) {
1543 if (expected.CanBeUndetectable()) {
// String case: non-empty strings are true (length test not visible).
1564 __ jmp(false_label);
1565 __ bind(&not_string);
// Heap-number case: branch on the numeric value.
1570 Label not_heap_number;
1572 factory()->heap_number_map());
1579 __ bind(&not_heap_number);
// Unconditional jump to a block, omitted when it is the fall-through.
1589 void LCodeGen::EmitGoto(
int block) {
1590 block = chunk_->LookupDestination(block);
1591 int next_block = GetNextEmittedBlock(current_block_);
1592 if (block != next_block) {
1593 __ jmp(chunk_->GetAssemblyLabel(block));
// Plain goto instruction.
1598 void LCodeGen::DoGoto(LGoto* instr) {
1599 EmitGoto(instr->block_id());
1607 case Token::EQ_STRICT:
1623 case Token::INSTANCEOF:
// Compare-and-branch for int32/double values: constants are folded to a
// direct goto, doubles use ucomisd, int32s a cmp with the immediate on
// whichever side has it.
1631 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1632 LOperand* left = instr->InputAt(0);
1633 LOperand* right = instr->InputAt(1);
1634 int false_block = chunk_->LookupDestination(instr->false_block_id());
1635 int true_block = chunk_->LookupDestination(instr->true_block_id());
1636 Condition cc = TokenToCondition(instr->op(), instr->is_double());
// Both sides constant: resolve at compile time.
1638 if (left->IsConstantOperand() && right->IsConstantOperand()) {
1645 EmitGoto(next_block);
1647 if (instr->is_double()) {
1650 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1653 if (right->IsConstantOperand()) {
1655 }
else if (left->IsConstantOperand()) {
// Constant on the left: compare reversed (condition adjusted elsewhere).
1656 __ cmp(ToOperand(right), ToInteger32Immediate(left));
1663 EmitBranch(true_block, false_block, cc);
// Branch on pointer identity of two tagged values.
1668 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1669 Register left =
ToRegister(instr->InputAt(0));
1670 Operand right = ToOperand(instr->InputAt(1));
1671 int false_block = chunk_->LookupDestination(instr->false_block_id());
1672 int true_block = chunk_->LookupDestination(instr->true_block_id());
1674 __ cmp(left, Operand(right));
1675 EmitBranch(true_block, false_block,
equal);
// Branch on equality with a known constant from the hydrogen instr.
1679 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1680 Register left =
ToRegister(instr->InputAt(0));
1681 int true_block = chunk_->LookupDestination(instr->true_block_id());
1682 int false_block = chunk_->LookupDestination(instr->false_block_id());
1684 __ cmp(left, instr->hydrogen()->right());
1685 EmitBranch(true_block, false_block,
equal);
// Branch on null/undefined comparison. Specialized representations and
// smis can never be nil. Loose (==) comparison also accepts the other
// nil value and undetectable objects.
1689 void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1690 Register reg =
ToRegister(instr->InputAt(0));
1691 int false_block = chunk_->LookupDestination(instr->false_block_id());
// Typed values are never null/undefined → straight to false.
1695 if (instr->hydrogen()->representation().IsSpecialization() ||
1696 instr->hydrogen()->type().IsSmi()) {
1697 EmitGoto(false_block);
1701 int true_block = chunk_->LookupDestination(instr->true_block_id());
1702 Handle<Object> nil_value = instr->nil() ==
kNullValue ?
1703 factory()->null_value() :
1704 factory()->undefined_value();
1705 __ cmp(reg, nil_value);
// Strict comparison: only the exact nil matches.
1707 EmitBranch(true_block, false_block,
equal);
// Loose comparison: the other nil and undetectables also match.
1709 Handle<Object> other_nil_value = instr->nil() ==
kNullValue ?
1710 factory()->undefined_value() :
1711 factory()->null_value();
1712 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1713 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1715 __ cmp(reg, other_nil_value);
1717 __ JumpIfSmi(reg, false_label);
// Check the undetectable bit in the map.
1720 Register scratch =
ToRegister(instr->TempAt(0));
1724 EmitBranch(true_block, false_block,
not_zero);
// Emit the "is JS object" test: smis and most non-objects branch to
// is_not_object; null counts as an object (remaining checks not visible).
1729 Condition LCodeGen::EmitIsObject(Register input,
1731 Label* is_not_object,
1733 __ JumpIfSmi(input, is_not_object);
1735 __ cmp(input, isolate()->factory()->null_value());
// Branch on whether the value is a JS object, via EmitIsObject.
1752 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1753 Register reg =
ToRegister(instr->InputAt(0));
1754 Register temp =
ToRegister(instr->TempAt(0));
1756 int true_block = chunk_->LookupDestination(instr->true_block_id());
1757 int false_block = chunk_->LookupDestination(instr->false_block_id());
1758 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1759 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1761 Condition true_cond = EmitIsObject(reg, temp, false_label, true_label);
1763 EmitBranch(true_block, false_block, true_cond);
// Emit the "is string" test; smis branch to is_not_string, otherwise
// the returned condition holds when the object has string type.
1767 Condition LCodeGen::EmitIsString(Register input,
1769 Label* is_not_string) {
1770 __ JumpIfSmi(input, is_not_string);
1772 Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
// Branch on whether the value is a string, via EmitIsString.
1778 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1779 Register reg =
ToRegister(instr->InputAt(0));
1780 Register temp =
ToRegister(instr->TempAt(0));
1782 int true_block = chunk_->LookupDestination(instr->true_block_id());
1783 int false_block = chunk_->LookupDestination(instr->false_block_id());
1784 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1786 Condition true_cond = EmitIsString(reg, temp, false_label);
1788 EmitBranch(true_block, false_block, true_cond);
// Branch on the smi tag bit (zero condition → value is a smi).
1792 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1793 Operand input = ToOperand(instr->InputAt(0));
1795 int true_block = chunk_->LookupDestination(instr->true_block_id());
1796 int false_block = chunk_->LookupDestination(instr->false_block_id());
1799 EmitBranch(true_block, false_block,
zero);
// Branch on the undetectable bit in the value's map; smis are never
// undetectable and go straight to the false block.
1803 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1804 Register input =
ToRegister(instr->InputAt(0));
1805 Register temp =
ToRegister(instr->TempAt(0));
1807 int true_block = chunk_->LookupDestination(instr->true_block_id());
1808 int false_block = chunk_->LookupDestination(instr->false_block_id());
1811 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
1815 EmitBranch(true_block, false_block,
not_zero);
1821 case Token::EQ_STRICT:
// String comparison via the compare IC, then branch on the resulting
// condition derived from the token.
1839 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
1841 int true_block = chunk_->LookupDestination(instr->true_block_id());
1842 int false_block = chunk_->LookupDestination(instr->false_block_id());
1845 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1847 Condition condition = ComputeCompareCondition(op);
1850 EmitBranch(true_block, false_block, condition);
// Instance type to test for a HasInstanceType branch (body not visible).
1854 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
// Condition for a HasInstanceType branch: exact type → equality test.
1863 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1866 if (from == to)
return equal;
// Branch on the object's instance type; smis go to the false block.
1874 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1875 Register input =
ToRegister(instr->InputAt(0));
1876 Register temp =
ToRegister(instr->TempAt(0));
1878 int true_block = chunk_->LookupDestination(instr->true_block_id());
1879 int false_block = chunk_->LookupDestination(instr->false_block_id());
1881 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1883 __ JumpIfSmi(input, false_label);
1885 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1886 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
// Extract the cached array index from a string's hash field.
1890 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1891 Register input =
ToRegister(instr->InputAt(0));
1892 Register result =
ToRegister(instr->result());
1894 if (FLAG_debug_code) {
1895 __ AbortIfNotString(input);
1899 __ IndexFromHash(result, result);
// Branch on whether the string's hash field contains a cached index.
1903 void LCodeGen::DoHasCachedArrayIndexAndBranch(
1904 LHasCachedArrayIndexAndBranch* instr) {
1905 Register input =
ToRegister(instr->InputAt(0));
1907 int true_block = chunk_->LookupDestination(instr->true_block_id());
1908 int false_block = chunk_->LookupDestination(instr->false_block_id());
1912 EmitBranch(true_block, false_block,
equal);
// Branch on an object's class name; "Function" and "Object" get special
// handling, otherwise the constructor's class name is compared.
// NOTE(review): most of the body was dropped by the extraction.
1918 void LCodeGen::EmitClassOfTest(Label* is_true,
1920 Handle<String>class_name,
1925 ASSERT(!input.is(temp2));
// Smis belong to no class.
1927 __ JumpIfSmi(input, is_false);
1929 if (class_name->IsEqualTo(
CStrVector(
"Function"))) {
1959 if (class_name->IsEqualTo(
CStrVector(
"Object"))) {
// Generic case: compare the extracted class name symbol.
1976 __ cmp(temp, class_name);
// Branch on class-of test; EmitClassOfTest leaves the flags so equality
// selects the true block.
1981 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1982 Register input =
ToRegister(instr->InputAt(0));
1983 Register temp =
ToRegister(instr->TempAt(0));
1984 Register temp2 =
ToRegister(instr->TempAt(1));
1986 Handle<String> class_name = instr->hydrogen()->class_name();
1988 int true_block = chunk_->LookupDestination(instr->true_block_id());
1989 int false_block = chunk_->LookupDestination(instr->false_block_id());
1991 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1992 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1994 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1996 EmitBranch(true_block, false_block,
equal);
// Branch on whether the object's map equals a known map.
2000 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2001 Register reg =
ToRegister(instr->InputAt(0));
2002 int true_block = instr->true_block_id();
2003 int false_block = instr->false_block_id();
2006 EmitBranch(true_block, false_block,
equal);
// instanceof via InstanceofStub; the stub's zero result means "is an
// instance", so map it to true_value/false_value in the result register.
2010 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2014 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2016 Label true_value, done;
2018 __ j(
zero, &true_value, Label::kNear);
2019 __ mov(
ToRegister(instr->result()), factory()->false_value());
2020 __ jmp(&done, Label::kNear);
2021 __ bind(&true_value);
2022 __ mov(
ToRegister(instr->result()), factory()->true_value());
// instanceof against a known global function: fast path consults an
// inline map-check cache cell (patched after the deferred stub call);
// smis, null and strings are never instances. The deferred class calls
// the stub out of line. NOTE(review): lines dropped by extraction.
2027 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2028 class DeferredInstanceOfKnownGlobal:
public LDeferredCode {
2030 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2031 LInstanceOfKnownGlobal* instr)
2032 : LDeferredCode(codegen), instr_(instr) { }
2033 virtual void Generate() {
2034 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
2036 virtual LInstruction* instr() {
return instr_; }
// Label of the inline map check, needed for delta patching.
2037 Label* map_check() {
return &map_check_; }
2039 LInstanceOfKnownGlobal* instr_;
2043 DeferredInstanceOfKnownGlobal* deferred;
2044 deferred =
new(zone()) DeferredInstanceOfKnownGlobal(
this, instr);
2046 Label done, false_result;
2047 Register
object =
ToRegister(instr->InputAt(1));
2048 Register temp =
ToRegister(instr->TempAt(0));
// Smis are never instances.
2051 __ JumpIfSmi(
object, &false_result);
// Inline cache: compare the object's map with the cached map cell.
2059 __ bind(deferred->map_check());
2060 Handle<JSGlobalPropertyCell> cache_cell =
2061 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
2062 __ cmp(map, Operand::Cell(cache_cell));
2064 __ mov(
eax, factory()->the_hole_value());
2069 __ bind(&cache_miss);
// null and strings short-circuit to false.
2071 __ cmp(
object, factory()->null_value());
2075 Condition is_string = masm_->IsObjectStringType(
object, temp, temp);
2076 __ j(is_string, &false_result);
// Everything else goes through the deferred stub call.
2079 __ jmp(deferred->entry());
2081 __ bind(&false_result);
2082 __ mov(
ToRegister(instr->result()), factory()->false_value());
2086 __ bind(deferred->exit());
2091 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2093 PushSafepointRegistersScope scope(
this);
2102 InstanceofStub stub(flags);
2108 Register temp =
ToRegister(instr->TempAt(0));
2109 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
2111 static const int kAdditionalDelta = 13;
2112 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
2113 __ mov(temp, Immediate(delta));
2114 __ StoreToSafepointRegisterSlot(temp, temp);
2115 CallCodeGeneric(stub.GetCode(),
2116 RelocInfo::CODE_TARGET,
2118 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2121 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2122 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2125 __ StoreToSafepointRegisterSlot(
eax,
eax);
// DoCmpT: tagged comparison via a compare IC, then converts the condition
// into a true_value/false_value heap-object result.
// NOTE(review): elided chunk — the IC lookup line is missing from this view.
2129 void LCodeGen::DoCmpT(LCmpT* instr) {
2133 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2135 Condition condition = ComputeCompareCondition(op);
2136 Label true_value, done;
2138 __ j(condition, &true_value, Label::kNear);
2139 __ mov(
ToRegister(instr->result()), factory()->false_value());
2140 __ jmp(&done, Label::kNear);
2141 __ bind(&true_value);
2142 __ mov(
ToRegister(instr->result()), factory()->true_value());
// DoReturn: epilogue. Optionally traces exit, then — when dynamic frame
// alignment is enabled — checks for the alignment padding marker (asserting
// on it under --debug-code) before returning.
2147 void LCodeGen::DoReturn(LReturn* instr) {
2155 __ CallRuntime(Runtime::kTraceExit, 1);
2157 if (dynamic_frame_alignment_) {
2164 if (dynamic_frame_alignment_) {
2168 if (FLAG_debug_code) {
2171 __ Assert(
equal,
"expected alignment marker");
2174 __ bind(&no_padding);
// DoLoadGlobalCell: loads a global property cell's value; deoptimizes if the
// cell holds the_hole and a hole check is required.
2180 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2181 Register result =
ToRegister(instr->result());
2182 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
2183 if (instr->hydrogen()->RequiresHoleCheck()) {
2184 __ cmp(result, factory()->the_hole_value());
2185 DeoptimizeIf(
equal, instr->environment());
// DoLoadGlobalGeneric: generic global load via LoadIC; name goes in ecx,
// typeof-loads use CODE_TARGET, plain loads use CODE_TARGET_CONTEXT.
2190 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2195 __ mov(
ecx, instr->name());
2196 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2197 RelocInfo::CODE_TARGET_CONTEXT;
2198 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2199 CallCode(ic, mode, instr);
// DoStoreGlobalCell: stores into a global cell, deoptimizing first if a hole
// check is required and the cell currently holds the_hole (cell was deleted).
2203 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2205 Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();
2211 if (instr->hydrogen()->RequiresHoleCheck()) {
2212 __ cmp(Operand::Cell(cell_handle), factory()->the_hole_value());
2213 DeoptimizeIf(
equal, instr->environment());
2217 __ mov(Operand::Cell(cell_handle), value);
// DoStoreGlobalGeneric: generic global store via StoreIC, choosing the
// strict-mode stub when applicable.
2222 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2227 __ mov(
ecx, instr->name());
2228 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
2229 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2230 : isolate()->builtins()->StoreIC_Initialize();
2231 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
// DoLoadContextSlot: loads a slot from a context register. If a hole check
// is required, either deoptimizes on the_hole or substitutes undefined.
// NOTE(review): elided chunk — the actual slot load is missing from this view.
2235 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2236 Register context =
ToRegister(instr->context());
2237 Register result =
ToRegister(instr->result());
2240 if (instr->hydrogen()->RequiresHoleCheck()) {
2241 __ cmp(result, factory()->the_hole_value());
2242 if (instr->hydrogen()->DeoptimizesOnHole()) {
2243 DeoptimizeIf(
equal, instr->environment());
2247 __ mov(result, factory()->undefined_value());
2248 __ bind(&is_not_hole);
// DoStoreContextSlot: stores into a context slot with optional hole check
// (deopt or skip) and a write barrier when the value may be a heap object.
2254 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2255 Register context =
ToRegister(instr->context());
2258 Label skip_assignment;
2261 if (instr->hydrogen()->RequiresHoleCheck()) {
2262 __ cmp(target, factory()->the_hole_value());
2263 if (instr->hydrogen()->DeoptimizesOnHole()) {
2264 DeoptimizeIf(
equal, instr->environment());
2270 __ mov(target, value);
2271 if (instr->hydrogen()->NeedsWriteBarrier()) {
2272 HType type = instr->hydrogen()->value()->type();
2275 Register temp =
ToRegister(instr->TempAt(0));
2277 __ RecordWriteContextSlot(context,
2286 __ bind(&skip_assignment);
// DoLoadNamedField: loads a named field, either in-object or (after loading
// the properties array — elided here) out-of-object at the given offset.
2290 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2291 Register
object =
ToRegister(instr->object());
2292 Register result =
ToRegister(instr->result());
2293 if (instr->hydrogen()->is_in_object()) {
2294 __ mov(result,
FieldOperand(
object, instr->hydrogen()->offset()));
2297 __ mov(result,
FieldOperand(result, instr->hydrogen()->offset()));
// EmitLoadFieldOrConstantFunction: looks up `name` in the map's descriptors;
// a FIELD result loads from the computed offset, otherwise the descriptor's
// constant function is loaded directly. Walks the prototype chain (the loop
// over heap->null_value below) and falls back to undefined.
// NOTE(review): elided chunk — branch structure between numbered lines is
// missing from this view.
2302 void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2305 Handle<String>
name,
2306 LEnvironment* env) {
2307 LookupResult lookup(isolate());
2308 type->LookupInDescriptors(
NULL, *name, &lookup);
2309 ASSERT(lookup.IsFound() || lookup.IsCacheable());
2310 if (lookup.IsFound() && lookup.type() ==
FIELD) {
2311 int index = lookup.GetLocalFieldIndexFromMap(*type);
2316 __ mov(result,
FieldOperand(
object, offset + type->instance_size()));
2323 Handle<JSFunction>
function(lookup.GetConstantFunctionFromMap(*type));
2324 __ LoadHeapObject(result,
function);
2330 while (current != heap->null_value()) {
2331 Handle<HeapObject> link(current);
2332 __ LoadHeapObject(result, link);
2338 __ mov(result, factory()->undefined_value());
// EmitPushTaggedOperand: pushes a tagged operand — smi/heap-object constants
// via Push/PushHeapObject, registers and stack slots via plain push.
2343 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
2344 ASSERT(!operand->IsDoubleRegister());
2345 if (operand->IsConstantOperand()) {
2347 if (object->IsSmi()) {
2348 __ Push(Handle<Smi>::cast(
object));
2350 __ PushHeapObject(Handle<HeapObject>::cast(
object));
2352 }
else if (operand->IsRegister()) {
2355 __ push(ToOperand(operand));
// CompactEmit: true when map i's lookup of `name` allows the short (near
// jump) code pattern in DoLoadNamedFieldPolymorphic.
2362 static bool CompactEmit(
2363 SmallMapList* list, Handle<String> name,
int i, Isolate* isolate) {
2364 LookupResult lookup(isolate);
2365 Handle<Map> map = list->at(i);
2366 map->LookupInDescriptors(
NULL, *name, &lookup);
2367 return lookup.IsFound() &&
// DoLoadNamedFieldPolymorphic: compares the receiver's map against each map
// in the type list; on match loads the field/constant-function, on the last
// map either deoptimizes or falls through to a generic LoadIC. CompactEmit
// decides near vs far jump distances. NOTE(review): elided chunk — map
// compares and loop bodies between numbered lines are missing from this view.
2372 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2373 Register
object =
ToRegister(instr->object());
2374 Register result =
ToRegister(instr->result());
2376 int map_count = instr->hydrogen()->types()->length();
2377 bool need_generic = instr->hydrogen()->need_generic();
2379 if (map_count == 0 && !need_generic) {
2383 Handle<String> name = instr->hydrogen()->name();
2385 bool all_are_compact =
true;
2386 for (
int i = 0; i < map_count; ++i) {
2387 if (!CompactEmit(instr->hydrogen()->types(),
name, i, isolate())) {
2388 all_are_compact =
false;
2392 for (
int i = 0; i < map_count; ++i) {
2393 bool last = (i == map_count - 1);
2394 Handle<Map> map = instr->hydrogen()->types()->at(i);
2397 if (last && !need_generic) {
2398 DeoptimizeIf(
not_equal, instr->environment());
2399 __ bind(&check_passed);
2400 EmitLoadFieldOrConstantFunction(
2401 result,
object, map, name, instr->environment());
2404 bool compact = all_are_compact ?
true :
2405 CompactEmit(instr->hydrogen()->types(),
name, i, isolate());
2406 __ j(
not_equal, &next, compact ? Label::kNear : Label::kFar);
2407 __ bind(&check_passed);
2408 EmitLoadFieldOrConstantFunction(
2409 result,
object, map, name, instr->environment());
2410 __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
2416 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2417 CallCode(ic, RelocInfo::CODE_TARGET, instr);
// DoLoadNamedGeneric: plain LoadIC call with the name in ecx.
2423 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2428 __ mov(
ecx, instr->name());
2429 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2430 CallCode(ic, RelocInfo::CODE_TARGET, instr);
// DoLoadFunctionPrototype: loads a function's prototype, deoptimizing on
// non-function input and on the_hole; handles the non-instance-prototype
// case via the &non_instance path.
2434 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2435 Register
function =
ToRegister(instr->function());
2436 Register temp =
ToRegister(instr->TempAt(0));
2437 Register result =
ToRegister(instr->result());
2441 DeoptimizeIf(
not_equal, instr->environment());
2454 __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
2455 DeoptimizeIf(
equal, instr->environment());
2464 __ jmp(&done, Label::kNear);
2468 __ bind(&non_instance);
// DoLoadElements: loads the elements backing store. Under --debug-code it
// verifies the map is fixed_array / fixed_cow_array or an acceptable
// fast/external elements kind, aborting otherwise.
// NOTE(review): elided chunk — the cmp targets for several j()s are missing.
2476 void LCodeGen::DoLoadElements(LLoadElements* instr) {
2477 Register result =
ToRegister(instr->result());
2478 Register input =
ToRegister(instr->InputAt(0));
2480 if (FLAG_debug_code) {
2481 Label done, ok, fail;
2483 Immediate(factory()->fixed_array_map()));
2484 __ j(
equal, &done, Label::kNear);
2486 Immediate(factory()->fixed_cow_array_map()));
2487 __ j(
equal, &done, Label::kNear);
// Pick a scratch register that doesn't alias the result.
2488 Register temp((result.is(
eax)) ?
ebx :
eax);
2495 __ j(
less, &fail, Label::kNear);
2499 __ j(
less, &fail, Label::kNear);
2503 __ Abort(
"Check for fast or external elements failed.");
// DoLoadExternalArrayPointer: loads the external array's backing pointer.
2511 void LCodeGen::DoLoadExternalArrayPointer(
2512 LLoadExternalArrayPointer* instr) {
2513 Register result =
ToRegister(instr->result());
2514 Register input =
ToRegister(instr->InputAt(0));
// DoAccessArgumentsAt: result = arguments[length - index]; length is
// clobbered (length -= index) to form the reversed index.
2520 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2521 Register arguments =
ToRegister(instr->arguments());
2522 Register length =
ToRegister(instr->length());
2523 Operand index = ToOperand(instr->index());
2524 Register result =
ToRegister(instr->result());
2526 __ sub(length, index);
2531 __ mov(result, Operand(arguments, length,
times_4, kPointerSize));
// DoLoadKeyedFastElement: loads an element from a fast-elements array via
// BuildFastArrayOperand; with a required hole check it deoptimizes either on
// a non-smi (smi-only case, elided) or on the_hole sentinel.
2535 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2536 Register result =
ToRegister(instr->result());
2540 BuildFastArrayOperand(instr->elements(),
2544 instr->additional_index()));
2547 if (instr->hydrogen()->RequiresHoleCheck()) {
2550 DeoptimizeIf(
not_equal, instr->environment());
2552 __ cmp(result, factory()->the_hole_value());
2553 DeoptimizeIf(
equal, instr->environment());
// DoLoadKeyedFastDoubleElement: loads a double element; the hole check
// compares the upper word against the hole NaN pattern (compare elided).
2559 void LCodeGen::DoLoadKeyedFastDoubleElement(
2560 LLoadKeyedFastDoubleElement* instr) {
2561 XMMRegister result = ToDoubleRegister(instr->result());
2563 if (instr->hydrogen()->RequiresHoleCheck()) {
2566 Operand hole_check_operand = BuildFastArrayOperand(
2567 instr->elements(), instr->key(),
2570 instr->additional_index());
2572 DeoptimizeIf(
equal, instr->environment());
2575 Operand double_load_operand = BuildFastArrayOperand(
2580 instr->additional_index());
2581 __ movdbl(result, double_load_operand);
// BuildFastArrayOperand: builds the addressing-mode Operand for an element
// access. Constant keys are folded into the displacement (aborting if the
// constant is too big to encode); otherwise a scaled-index operand is used.
2585 Operand LCodeGen::BuildFastArrayOperand(
2586 LOperand* elements_pointer,
2590 uint32_t additional_index) {
2591 Register elements_pointer_reg =
ToRegister(elements_pointer);
2593 if (key->IsConstantOperand()) {
2595 if (constant_value & 0xF0000000) {
2596 Abort(
"array index constant value too big");
2598 return Operand(elements_pointer_reg,
2599 ((constant_value + additional_index) << shift_size)
2603 return Operand(elements_pointer_reg,
2606 offset + (additional_index << shift_size));
// DoLoadKeyedSpecializedArrayElement: typed-array load; float32 loads widen
// via cvtss2sd, float64 via movdbl, integer kinds use the width-matched
// mov/movsx/movzx, and uint32 results deoptimize when the sign bit is set
// (value not representable as int32).
2611 void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2612 LLoadKeyedSpecializedArrayElement* instr) {
2614 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2618 instr->additional_index()));
2620 XMMRegister result(ToDoubleRegister(instr->result()));
2621 __ movss(result, operand);
2622 __ cvtss2sd(result, result);
2624 __ movdbl(ToDoubleRegister(instr->result()), operand);
2626 Register result(
ToRegister(instr->result()));
2627 switch (elements_kind) {
2629 __ movsx_b(result, operand);
2633 __ movzx_b(result, operand);
2636 __ movsx_w(result, operand);
2639 __ movzx_w(result, operand);
2642 __ mov(result, operand);
2645 __ mov(result, operand);
2646 __ test(result, Operand(result));
2650 DeoptimizeIf(
negative, instr->environment());
// DoLoadKeyedGeneric: generic keyed load through KeyedLoadIC.
2669 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2674 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2675 CallCode(ic, RelocInfo::CODE_TARGET, instr);
// DoArgumentsElements: computes the arguments-frame base. For inlined frames
// it's esp - 2 words; otherwise it checks for an arguments adaptor frame and
// selects ebp or the adaptor frame accordingly.
2679 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2680 Register result =
ToRegister(instr->result());
2682 if (instr->hydrogen()->from_inlined()) {
2683 __ lea(result, Operand(
esp, -2 * kPointerSize));
2686 Label done, adapted;
2689 __ cmp(Operand(result),
2691 __ j(
equal, &adapted, Label::kNear);
2694 __ mov(result, Operand(
ebp));
2695 __ jmp(&done, Label::kNear);
// DoArgumentsLength: length is num_parameters for a normal frame; for an
// adaptor frame it loads and untags the stored argument count.
2708 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2709 Operand elem = ToOperand(instr->InputAt(0));
2710 Register result =
ToRegister(instr->result());
2716 __ mov(result, Immediate(scope()->num_parameters()));
2717 __ j(
equal, &done, Label::kNear);
2721 __ mov(result, Operand(result,
2723 __ SmiUntag(result);
// DoWrapReceiver: classic-mode receiver fixup — null/undefined become the
// global receiver; smis and non-spec-objects deoptimize.
2730 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2731 Register receiver =
ToRegister(instr->receiver());
2732 Register
function =
ToRegister(instr->function());
2733 Register scratch =
ToRegister(instr->TempAt(0));
2738 Label global_object, receiver_ok;
2754 __ cmp(receiver, factory()->null_value());
2755 __ j(
equal, &global_object, Label::kNear);
2756 __ cmp(receiver, factory()->undefined_value());
2757 __ j(
equal, &global_object, Label::kNear);
2761 DeoptimizeIf(
equal, instr->environment());
2763 DeoptimizeIf(
below, instr->environment());
2764 __ jmp(&receiver_ok, Label::kNear);
2766 __ bind(&global_object);
2774 __ bind(&receiver_ok);
// DoApplyArguments: Function.prototype.apply support — deoptimizes when the
// argument count exceeds kArgumentsLimit (1 KB), pushes the arguments in a
// loop (elided), then invokes with a lazy-deopt safepoint generator.
2778 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2779 Register receiver =
ToRegister(instr->receiver());
2780 Register
function =
ToRegister(instr->function());
2781 Register length =
ToRegister(instr->length());
2782 Register elements =
ToRegister(instr->elements());
2789 const uint32_t kArgumentsLimit = 1 *
KB;
2790 __ cmp(length, kArgumentsLimit);
2791 DeoptimizeIf(
above, instr->environment());
2794 __ mov(receiver, length);
2800 __ test(length, Operand(length));
2801 __ j(
zero, &invoke, Label::kNear);
2809 ASSERT(instr->HasPointerMap());
2810 LPointerMap* pointers = instr->pointer_map();
2811 RecordPosition(pointers->position());
2812 SafepointGenerator safepoint_generator(
2813 this, pointers, Safepoint::kLazyDeopt);
2814 ParameterCount actual(
eax);
// DoPushArgument: pushes one tagged argument operand.
2820 void LCodeGen::DoPushArgument(LPushArgument* instr) {
2821 LOperand* argument = instr->InputAt(0);
2822 EmitPushTaggedOperand(argument);
// DoDrop: pops instr->count() words from the stack.
2826 void LCodeGen::DoDrop(LDrop* instr) {
2827 __ Drop(instr->count());
// DoThisFunction: materializes the current closure into the result register.
2831 void LCodeGen::DoThisFunction(LThisFunction* instr) {
2832 Register result =
ToRegister(instr->result());
2833 __ LoadHeapObject(result, instr->hydrogen()->closure());
// DoContext / DoOuterContext / DoDeclareGlobals / DoGlobalObject /
// DoGlobalReceiver: small context/global accessors. Actual load instructions
// are elided in this view; bodies below are the visible remnants.
2837 void LCodeGen::DoContext(LContext* instr) {
2838 Register result =
ToRegister(instr->result());
2843 void LCodeGen::DoOuterContext(LOuterContext* instr) {
2844 Register context =
ToRegister(instr->context());
2845 Register result =
ToRegister(instr->result());
2851 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
2854 __ push(Immediate(instr->hydrogen()->pairs()));
2856 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
2860 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2861 Register context =
ToRegister(instr->context());
2862 Register result =
ToRegister(instr->result());
2867 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2868 Register global =
ToRegister(instr->global());
2869 Register result =
ToRegister(instr->result());
// CallKnownFunction: direct call to a statically-known JSFunction. Can
// invoke directly when no arguments adaption is needed; loads edi if not
// already initialized, switches context when the callee's context differs,
// sets the call kind in ecx, self-calls get special treatment, and a simple
// safepoint is recorded. Otherwise falls back to the full InvokeFunction
// path with a lazy-deopt safepoint generator.
2874 void LCodeGen::CallKnownFunction(Handle<JSFunction>
function,
2876 LInstruction* instr,
2878 EDIState edi_state) {
2879 bool can_invoke_directly = !
function->NeedsArgumentsAdaption() ||
2880 function->shared()->formal_parameter_count() == arity;
2882 LPointerMap* pointers = instr->pointer_map();
2883 RecordPosition(pointers->position());
2885 if (can_invoke_directly) {
2886 if (edi_state == EDI_UNINITIALIZED) {
2887 __ LoadHeapObject(
edi,
function);
2891 bool change_context =
2892 (info()->closure()->context() !=
function->context()) ||
2893 scope()->contains_with() ||
2894 (scope()->num_heap_slots() > 0);
2896 if (change_context) {
2904 if (!function->NeedsArgumentsAdaption()) {
2909 __ SetCallKind(
ecx, call_kind);
2910 if (*
function == *info()->closure()) {
2915 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
2918 SafepointGenerator generator(
2919 this, pointers, Safepoint::kLazyDeopt);
2920 ParameterCount count(arity);
2921 __ InvokeFunction(
function, count,
CALL_FUNCTION, generator, call_kind);
// DoCallConstantFunction: delegates to CallKnownFunction (remaining
// arguments elided in this view).
2926 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2928 CallKnownFunction(instr->function(),
// DoDeferredMathAbsTaggedHeapNumber: slow path for Math.abs on a tagged
// value. Deoptimizes on non-heap-number; allocates a fresh heap number (via
// runtime on allocation failure), negates the payload (elided), and writes
// the result back through the safepoint slot.
2936 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2937 Register input_reg =
ToRegister(instr->value());
2939 factory()->heap_number_map());
2940 DeoptimizeIf(
not_equal, instr->environment());
// Scratch register chosen to avoid the input register.
2943 Register tmp = input_reg.is(
eax) ?
ecx :
eax;
2947 PushSafepointRegistersScope scope(
this);
2961 Label allocated, slow;
2962 __ AllocateHeapNumber(tmp, tmp2,
no_reg, &slow);
2968 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
2969 instr, instr->context());
2972 if (!tmp.is(
eax))
__ mov(tmp,
eax);
2975 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2977 __ bind(&allocated);
2983 __ StoreToSafepointRegisterSlot(input_reg, tmp);
// EmitIntegerMathAbs: in-place integer abs; deoptimizes if the negation
// leaves the value negative (i.e. input was INT_MIN — overflow).
2989 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2990 Register input_reg =
ToRegister(instr->value());
2991 __ test(input_reg, Operand(input_reg));
2995 __ test(input_reg, Operand(input_reg));
2996 DeoptimizeIf(negative, instr->environment());
2997 __ bind(&is_positive);
// DoMathAbs: dispatches on representation — doubles clear the sign bit with
// a subsd/pand trick, integers use EmitIntegerMathAbs, tagged values try the
// smi fast path and defer heap numbers to the deferred code above.
3001 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
3003 class DeferredMathAbsTaggedHeapNumber:
public LDeferredCode {
3005 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
3006 LUnaryMathOperation* instr)
3007 : LDeferredCode(codegen), instr_(instr) { }
3008 virtual void Generate() {
3009 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3011 virtual LInstruction* instr() {
return instr_; }
3013 LUnaryMathOperation* instr_;
3016 ASSERT(instr->value()->Equals(instr->result()));
3017 Representation r = instr->hydrogen()->value()->representation();
3020 XMMRegister scratch =
xmm0;
3021 XMMRegister input_reg = ToDoubleRegister(instr->value());
3022 __ xorps(scratch, scratch);
3023 __ subsd(scratch, input_reg);
3024 __ pand(input_reg, scratch);
3025 }
else if (r.IsInteger32()) {
3026 EmitIntegerMathAbs(instr);
3028 DeferredMathAbsTaggedHeapNumber* deferred =
3029 new(zone()) DeferredMathAbsTaggedHeapNumber(
this, instr);
3030 Register input_reg =
ToRegister(instr->value());
3032 __ JumpIfNotSmi(input_reg, deferred->entry());
3033 EmitIntegerMathAbs(instr);
3034 __ bind(deferred->exit());
// DoMathFloor: floor to int32. With SSE4.1 (roundsd path, elided) the result
// is range-checked against 0x80000000 (the cvttsd2si failure pattern) and a
// -0 check uses movmskpd on the sign bit. Without SSE4.1, positive inputs
// truncate directly; negative inputs truncate, compare back, and subtract 1
// when the truncation rounded toward zero, deoptimizing on overflow.
3039 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
3040 XMMRegister xmm_scratch =
xmm0;
3041 Register output_reg =
ToRegister(instr->result());
3042 XMMRegister input_reg = ToDoubleRegister(instr->value());
3045 CpuFeatures::Scope scope(
SSE4_1);
3049 __ xorps(xmm_scratch, xmm_scratch);
3050 __ ucomisd(input_reg, xmm_scratch);
// movmskpd extracts the sign bit: deopt on -0 when it must be preserved.
3052 __ movmskpd(output_reg, input_reg);
3053 __ test(output_reg, Immediate(1));
3054 DeoptimizeIf(
not_zero, instr->environment());
3058 __ cvttsd2si(output_reg, Operand(xmm_scratch));
// 0x80000000 is cvttsd2si's "invalid" result — out-of-range input.
3060 __ cmp(output_reg, 0x80000000u);
3061 DeoptimizeIf(
equal, instr->environment());
3063 Label negative_sign;
3066 __ xorps(xmm_scratch, xmm_scratch);
3067 __ ucomisd(input_reg, xmm_scratch);
3069 __ j(
below, &negative_sign, Label::kNear);
3073 Label positive_sign;
3074 __ j(
above, &positive_sign, Label::kNear);
3075 __ movmskpd(output_reg, input_reg);
3076 __ test(output_reg, Immediate(1));
3077 DeoptimizeIf(
not_zero, instr->environment());
3078 __ Set(output_reg, Immediate(0));
3079 __ jmp(&done, Label::kNear);
3080 __ bind(&positive_sign);
3084 __ cvttsd2si(output_reg, Operand(input_reg));
3086 __ cmp(output_reg, 0x80000000u);
3087 DeoptimizeIf(
equal, instr->environment());
3088 __ jmp(&done, Label::kNear);
3091 __ bind(&negative_sign);
// Truncate, convert back, and adjust by one if truncation rounded up.
3093 __ cvttsd2si(output_reg, Operand(input_reg));
3094 __ cvtsi2sd(xmm_scratch, output_reg);
3095 __ ucomisd(input_reg, xmm_scratch);
3096 __ j(
equal, &done, Label::kNear);
3097 __ sub(output_reg, Immediate(1));
3098 DeoptimizeIf(
overflow, instr->environment());
// DoMathRound: round-half-up via adding 0.5 then truncating; inputs in
// [-0.5, 0) deopt on -0 (movmskpd sign check) or round to 0, with -0.5
// (0xBF000000 as float) as the comparison boundary; below that, deopt.
3104 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3105 XMMRegister xmm_scratch =
xmm0;
3106 Register output_reg =
ToRegister(instr->result());
3107 XMMRegister input_reg = ToDoubleRegister(instr->value());
3109 Label below_half, done;
3111 ExternalReference one_half = ExternalReference::address_of_one_half();
3112 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
3113 __ ucomisd(xmm_scratch, input_reg);
3116 __ addsd(xmm_scratch, input_reg);
3120 __ cvttsd2si(output_reg, Operand(xmm_scratch));
3123 __ cmp(output_reg, 0x80000000u);
3124 DeoptimizeIf(
equal, instr->environment());
3127 __ bind(&below_half);
3133 __ movmskpd(output_reg, input_reg);
3134 __ test(output_reg, Immediate(1));
3135 DeoptimizeIf(
not_zero, instr->environment());
// 0xBF000000 is -0.5 as a single-precision float, widened below.
3138 __ mov(output_reg, Immediate(0xBF000000));
3139 __ movd(xmm_scratch, Operand(output_reg));
3140 __ cvtss2sd(xmm_scratch, xmm_scratch);
3141 __ ucomisd(input_reg, xmm_scratch);
3142 DeoptimizeIf(
below, instr->environment());
3144 __ Set(output_reg, Immediate(0));
// DoMathSqrt: in-place sqrtsd on the input/result register.
3149 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3150 XMMRegister input_reg = ToDoubleRegister(instr->value());
3151 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3152 __ sqrtsd(input_reg, input_reg);
// DoMathPowHalf: Math.pow(x, 0.5). Special-cases -Infinity (0xFF800000 as a
// float, widened) which must yield +Infinity; otherwise adds +0 to turn -0
// into +0 before sqrtsd (per ES spec sqrt(-0) is -0, but pow(x,0.5) is not).
3156 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3157 XMMRegister xmm_scratch =
xmm0;
3158 XMMRegister input_reg = ToDoubleRegister(instr->value());
3159 Register scratch =
ToRegister(instr->temp());
3160 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3168 __ mov(scratch, 0xFF800000);
3169 __ movd(xmm_scratch, scratch);
3170 __ cvtss2sd(xmm_scratch, xmm_scratch);
3171 __ ucomisd(input_reg, xmm_scratch);
3175 __ j(
carry, &sqrt, Label::kNear);
3177 __ xorps(input_reg, input_reg);
3178 __ subsd(input_reg, xmm_scratch);
3179 __ jmp(&done, Label::kNear);
3183 __ xorps(xmm_scratch, xmm_scratch);
3184 __ addsd(input_reg, xmm_scratch);
3185 __ sqrtsd(input_reg, input_reg);
// DoPower: Math.pow via MathPowStub (call elided). Fixed register contract:
// exponent in xmm1 (double) or eax (tagged), base in xmm2, result in xmm3.
// Tagged exponents deopt when neither smi nor heap number.
3190 void LCodeGen::DoPower(LPower* instr) {
3191 Representation exponent_type = instr->hydrogen()->right()->representation();
3194 ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
3195 ToDoubleRegister(instr->InputAt(1)).is(
xmm1));
3196 ASSERT(!instr->InputAt(1)->IsRegister() ||
3198 ASSERT(ToDoubleRegister(instr->InputAt(0)).is(
xmm2));
3199 ASSERT(ToDoubleRegister(instr->result()).is(
xmm3));
3201 if (exponent_type.IsTagged()) {
3203 __ JumpIfSmi(
eax, &no_deopt);
3205 DeoptimizeIf(
not_equal, instr->environment());
3209 }
else if (exponent_type.IsInteger32()) {
3213 ASSERT(exponent_type.IsDouble());
// DoRandom: inline Math.random using the per-context random seed; falls back
// to deferred C-function call when the seed is uninitialized (zero). Masks
// to 0x3FFFF bits and converts via the 0x49800000 (2^20 as float) bias.
// NOTE(review): elided chunk — most of the PRNG arithmetic is missing here.
3220 void LCodeGen::DoRandom(LRandom* instr) {
3221 class DeferredDoRandom:
public LDeferredCode {
3223 DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
3224 : LDeferredCode(codegen), instr_(instr) { }
3225 virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
3226 virtual LInstruction* instr() {
return instr_; }
3231 DeferredDoRandom* deferred =
new(zone()) DeferredDoRandom(
this, instr);
3235 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3238 static const int kSeedSize =
sizeof(uint32_t);
3242 static const int kRandomSeedOffset =
3251 __ j(
zero, deferred->entry());
3275 __ and_(
eax, Immediate(0x3FFFF));
3278 __ bind(deferred->exit());
3282 __ mov(
ebx, Immediate(0x49800000));
// DoDeferredRandom: calls the C random_uint32_function with one argument.
3291 void LCodeGen::DoDeferredRandom(LRandom* instr) {
3292 __ PrepareCallCFunction(1,
ebx);
3294 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
// DoMathLog: Math.log. Non-positive inputs short-circuit: negative → the
// canonical non-hole NaN, zero → -Infinity (0xFFF00000:00000000 built on the
// stack); positive inputs round-trip through x87 (fld_d/fstp_d, the fyl2x
// sequence itself is elided) via the stack.
3299 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3300 ASSERT(instr->value()->Equals(instr->result()));
3301 XMMRegister input_reg = ToDoubleRegister(instr->value());
3304 __ ucomisd(input_reg,
xmm0);
3305 __ j(
above, &positive, Label::kNear);
3306 __ j(
equal, &zero, Label::kNear);
3307 ExternalReference nan =
3308 ExternalReference::address_of_canonical_non_hole_nan();
3309 __ movdbl(input_reg, Operand::StaticVariable(nan));
3310 __ jmp(&done, Label::kNear);
// Build -Infinity (upper word 0xFFF00000, lower 0) on the stack.
3312 __ push(Immediate(0xFFF00000));
3313 __ push(Immediate(0));
3314 __ movdbl(input_reg, Operand(
esp, 0));
3316 __ jmp(&done, Label::kNear);
3320 __ movdbl(Operand(
esp, 0), input_reg);
3321 __ fld_d(Operand(
esp, 0));
3323 __ fstp_d(Operand(
esp, 0));
3324 __ movdbl(input_reg, Operand(
esp, 0));
// DoMathTan / DoMathCos / DoMathSin: delegate to a transcendental cache
// stub (stub construction elided); result fixed in xmm1.
3330 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
3331 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3334 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3338 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3339 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3342 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3346 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3347 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3350 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// DoUnaryMathOperation: dispatches on op (cases elided in this view).
3354 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3355 switch (instr->op()) {
// DoInvokeFunction: unknown target goes through the generic invoke with a
// lazy-deopt safepoint; a known target uses CallKnownFunction with edi
// already holding the function.
3387 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3390 ASSERT(instr->HasPointerMap());
3392 if (instr->known_function().is_null()) {
3393 LPointerMap* pointers = instr->pointer_map();
3394 RecordPosition(pointers->position());
3395 SafepointGenerator generator(
3396 this, pointers, Safepoint::kLazyDeopt);
3397 ParameterCount count(instr->arity());
3400 CallKnownFunction(instr->known_function(),
3404 EDI_CONTAINS_TARGET);
// DoCallKeyed / DoCallNamed / DoCallFunction / DoCallGlobal /
// DoCallKnownGlobal / DoCallNew: the call-instruction family, each routing
// through the matching IC/stub. Named/global calls put the name in ecx;
// CallNew sets the arity in eax and uses CONSTRUCT_CALL relocation.
3409 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3414 int arity = instr->arity();
3416 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
3417 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3421 void LCodeGen::DoCallNamed(LCallNamed* instr) {
3425 int arity = instr->arity();
3426 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3428 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3429 __ mov(
ecx, instr->name());
3430 CallCode(ic, mode, instr);
3434 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3439 int arity = instr->arity();
3441 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3445 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3449 int arity = instr->arity();
3450 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
3452 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3453 __ mov(
ecx, instr->name());
3454 CallCode(ic, mode, instr);
3458 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3460 CallKnownFunction(instr->target(),
3468 void LCodeGen::DoCallNew(LCallNew* instr) {
3474 __ Set(
eax, Immediate(instr->arity()));
3475 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
// DoCallRuntime: straight runtime call.
3479 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3480 CallRuntime(instr->function(), instr->arity(), instr);
// DoStoreNamedField: stores a named field, handling an optional map
// transition (with or without a map write barrier) and in-object vs
// properties-array storage, each with RecordWriteField when the value may
// be a heap object. NOTE(review): elided chunk — several store instructions
// between the numbered lines are missing from this view.
3484 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3485 Register
object =
ToRegister(instr->object());
3487 int offset = instr->offset();
3489 if (!instr->transition().is_null()) {
3490 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
3493 Register temp =
ToRegister(instr->TempAt(0));
3494 Register temp_map =
ToRegister(instr->TempAt(1));
3495 __ mov(temp_map, instr->transition());
3498 __ RecordWriteField(
object,
3509 HType type = instr->hydrogen()->value()->type();
3512 if (instr->is_in_object()) {
3514 if (instr->hydrogen()->NeedsWriteBarrier()) {
3515 Register temp =
ToRegister(instr->TempAt(0));
3517 __ RecordWriteField(
object,
3526 Register temp =
ToRegister(instr->TempAt(0));
3529 if (instr->hydrogen()->NeedsWriteBarrier()) {
3532 __ RecordWriteField(temp,
// DoStoreNamedGeneric: generic store through StoreIC, strict variant when
// the strict mode flag is set; name in ecx.
3544 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3549 __ mov(
ecx, instr->name());
3550 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
3551 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3552 : isolate()->builtins()->StoreIC_Initialize();
3553 CallCode(ic, RelocInfo::CODE_TARGET, instr);
// DoBoundsCheck: compares index against length (constant or register form);
// the DeoptimizeIf on failure is elided in this view.
3557 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3558 if (instr->index()->IsConstantOperand()) {
3559 __ cmp(ToOperand(instr->length()),
3563 __ cmp(
ToRegister(instr->index()), ToOperand(instr->length()));
// DoStoreKeyedSpecializedArrayElement: typed-array store; float32 narrows
// through xmm0 via cvtsd2ss, float64 stores with movdbl, integer kinds use
// the width-matched mov_b/mov_w/mov.
3569 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3570 LStoreKeyedSpecializedArrayElement* instr) {
3572 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3576 instr->additional_index()));
3578 __ cvtsd2ss(
xmm0, ToDoubleRegister(instr->value()));
3581 __ movdbl(operand, ToDoubleRegister(instr->value()));
3584 switch (elements_kind) {
3588 __ mov_b(operand, value);
3592 __ mov_w(operand, value);
3596 __ mov(operand, value);
// DoStoreKeyedFastElement: fast-elements store; when a write barrier is
// needed the element address is materialized with lea into the key register
// (constant keys are asserted impossible there) and RecordWrite is emitted.
3615 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3617 Register elements =
ToRegister(instr->object());
3618 Register key = instr->key()->IsRegister() ?
ToRegister(instr->key()) :
no_reg;
3620 Operand operand = BuildFastArrayOperand(
3625 instr->additional_index());
3626 __ mov(operand, value);
3628 if (instr->hydrogen()->NeedsWriteBarrier()) {
3629 ASSERT(!instr->key()->IsConstantOperand());
3630 HType type = instr->hydrogen()->value()->type();
3634 __ lea(key, operand);
3635 __ RecordWrite(elements,
// DoStoreKeyedFastDoubleElement: double store; NaNs are first canonicalized
// (ucomisd self-compare detects NaN, then the canonical non-hole NaN is
// substituted) so the hole NaN bit pattern can never be stored.
3645 void LCodeGen::DoStoreKeyedFastDoubleElement(
3646 LStoreKeyedFastDoubleElement* instr) {
3647 XMMRegister value = ToDoubleRegister(instr->value());
3649 if (instr->NeedsCanonicalization()) {
3652 __ ucomisd(value, value);
3655 ExternalReference canonical_nan_reference =
3656 ExternalReference::address_of_canonical_non_hole_nan();
3657 __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
3658 __ bind(&have_value);
3661 Operand double_store_operand = BuildFastArrayOperand(
3666 instr->additional_index());
3667 __ movdbl(double_store_operand, value);
// DoStoreKeyedGeneric: generic keyed store via KeyedStoreIC, strict variant
// when applicable.
3671 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3677 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
3678 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3679 : isolate()->builtins()->KeyedStoreIC_Initialize();
3680 CallCode(ic, RelocInfo::CODE_TARGET, instr);
// DoTransitionElementsKind: changes an object's elements kind. Simple map
// transitions just rewrite the map (with RecordWriteForMap); smi→double and
// double→object transitions call the respective builtin with the object
// copied into the expected fixed register. Skipped entirely when the map
// doesn't match (not_applicable).
3684 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3685 Register object_reg =
ToRegister(instr->object());
3686 Register new_map_reg =
ToRegister(instr->new_map_reg());
3688 Handle<Map> from_map = instr->original_map();
3689 Handle<Map> to_map = instr->transitioned_map();
3693 Label not_applicable;
3694 bool is_simple_map_transition =
3696 Label::Distance branch_distance =
3697 is_simple_map_transition ? Label::kNear : Label::kFar;
3699 __ j(
not_equal, &not_applicable, branch_distance);
3700 if (is_simple_map_transition) {
3701 Register object_reg =
ToRegister(instr->object());
3702 Handle<Map> map = instr->hydrogen()->transitioned_map();
3707 __ RecordWriteForMap(object_reg, to_map, new_map_reg,
3712 __ mov(new_map_reg, to_map);
3713 Register fixed_object_reg =
ToRegister(instr->temp_reg());
3716 __ mov(fixed_object_reg, object_reg);
3717 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3718 RelocInfo::CODE_TARGET, instr);
3721 __ mov(new_map_reg, to_map);
3722 Register fixed_object_reg =
ToRegister(instr->temp_reg());
3725 __ mov(fixed_object_reg, object_reg);
3726 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3727 RelocInfo::CODE_TARGET, instr);
3731 __ bind(&not_applicable);
// DoStringCharCodeAt: fast inline charCodeAt (inline path elided) with a
// deferred runtime fallback.
3735 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3736 class DeferredStringCharCodeAt:
public LDeferredCode {
3738 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3739 : LDeferredCode(codegen), instr_(instr) { }
3740 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3741 virtual LInstruction* instr() {
return instr_; }
3743 LStringCharCodeAt* instr_;
3746 DeferredStringCharCodeAt* deferred =
3747 new(zone()) DeferredStringCharCodeAt(
this, instr);
3755 __ bind(deferred->exit());
// DoDeferredStringCharCodeAt: slow path — zeroes the result (safe value for
// the GC while registers are spilled), pushes string/index, calls
// Runtime::kStringCharCodeAt, smi-checks the result under --debug-code, and
// stores eax back through the safepoint slot.
3759 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3760 Register
string =
ToRegister(instr->string());
3761 Register result =
ToRegister(instr->result());
3766 __ Set(result, Immediate(0));
3768 PushSafepointRegistersScope scope(
this);
3773 if (instr->index()->IsConstantOperand()) {
3781 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
3782 instr, instr->context());
3783 if (FLAG_debug_code) {
3784 __ AbortIfNotSmi(
eax);
3787 __ StoreToSafepointRegisterSlot(result,
eax);
// DoStringCharFromCode: fast path looks the char code up in the
// single-character string cache; cache miss (undefined) or a code above the
// single-char range jumps to deferred code.
3791 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3792 class DeferredStringCharFromCode:
public LDeferredCode {
3794 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3795 : LDeferredCode(codegen), instr_(instr) { }
3796 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3797 virtual LInstruction* instr() {
return instr_; }
3799 LStringCharFromCode* instr_;
3802 DeferredStringCharFromCode* deferred =
3803 new(zone()) DeferredStringCharFromCode(
this, instr);
3805 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3806 Register char_code =
ToRegister(instr->char_code());
3807 Register result =
ToRegister(instr->result());
3808 ASSERT(!char_code.is(result));
3811 __ j(
above, deferred->entry());
3812 __ Set(result, Immediate(factory()->single_character_string_cache()));
3816 __ cmp(result, factory()->undefined_value());
3817 __ j(
equal, deferred->entry());
3818 __ bind(deferred->exit());
3822 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3823 Register char_code =
ToRegister(instr->char_code());
3824 Register result =
ToRegister(instr->result());
3829 __ Set(result, Immediate(0));
3831 PushSafepointRegistersScope scope(
this);
3832 __ SmiTag(char_code);
3834 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
3835 __ StoreToSafepointRegisterSlot(result,
eax);
3839 void LCodeGen::DoStringLength(LStringLength* instr) {
3840 Register
string =
ToRegister(instr->string());
3841 Register result =
ToRegister(instr->result());
// String concatenation: push both tagged operands and call the StringAddStub.
// NOTE(review): the stub declaration (original line 3849) was dropped by
// extraction; `stub` below is otherwise undeclared in this listing.
3846 void LCodeGen::DoStringAdd(LStringAdd* instr) {
3847 EmitPushTaggedOperand(instr->left());
3848 EmitPushTaggedOperand(instr->right());
3850 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

// int32 -> double conversion via cvtsi2sd; input may be a register or a
// stack slot, output must be an XMM register.
3854 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3855 LOperand* input = instr->InputAt(0);
3856 ASSERT(input->IsRegister() || input->IsStackSlot());
3857 LOperand* output = instr->result();
3858 ASSERT(output->IsDoubleRegister());
3859 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
// NOTE(review): extraction artifact — original numbering skips (3878 jumps to
// 3881, 3902 -> 3911, ...); the inline smi-tag fast path and several labels
// (`tmp`, `slow`, `done`) are missing from this listing.

// Tag an int32 as a number: smi-tag when it fits, otherwise box in a heap
// number via the deferred path.
3863 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3864 class DeferredNumberTagI:
public LDeferredCode {
3866 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3867 : LDeferredCode(codegen), instr_(instr) { }
3868 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3869 virtual LInstruction* instr() {
return instr_; }
3871 LNumberTagI* instr_;
3874 LOperand* input = instr->InputAt(0);
3875 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3878 DeferredNumberTagI* deferred =
new(zone()) DeferredNumberTagI(
this, instr);
3881 __ bind(deferred->exit());

// Slow path: recover the untagged value (the smi-tag overflowed, so flip the
// sign bit back), convert to double, and allocate a heap number for it.
3885 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3887 Register reg =
ToRegister(instr->InputAt(0));
3891 PushSafepointRegistersScope scope(
this);
// Undo the overflowed smi-tag: restores the original int32 value.
3898 __ xor_(reg, 0x80000000);
3899 __ cvtsi2sd(
xmm0, Operand(reg));
3900 if (FLAG_inline_new) {
3901 __ AllocateHeapNumber(reg, tmp,
no_reg, &slow);
3902 __ jmp(&done, Label::kNear);
// Keep a GC-safe zero in the slot while the runtime allocates.
3911 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3918 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3919 RecordSafepointWithRegisters(
3920 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
3921 if (!reg.is(
eax))
__ mov(reg,
eax);
3927 __ StoreToSafepointRegisterSlot(reg, reg);

// Box a double in a freshly allocated heap number; deferred path used when
// inline allocation fails (or always, when FLAG_inline_new is off).
3931 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3932 class DeferredNumberTagD:
public LDeferredCode {
3934 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3935 : LDeferredCode(codegen), instr_(instr) { }
3936 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3937 virtual LInstruction* instr() {
return instr_; }
3939 LNumberTagD* instr_;
3942 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3946 DeferredNumberTagD* deferred =
new(zone()) DeferredNumberTagD(
this, instr);
3947 if (FLAG_inline_new) {
3948 __ AllocateHeapNumber(reg, tmp,
no_reg, deferred->entry());
3950 __ jmp(deferred->entry());
3952 __ bind(deferred->exit());

// Slow path: runtime-allocate the heap number with registers safepointed.
3957 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
// Zero the result slot so the GC never sees a stale pointer.
3962 __ Set(reg, Immediate(0));
3964 PushSafepointRegistersScope scope(
this);
3971 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3972 RecordSafepointWithRegisters(
3973 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
3974 __ StoreToSafepointRegisterSlot(reg,
eax);
// NOTE(review): extraction artifact — the actual SmiTag/SmiUntag emit calls
// and several map-compare lines were dropped (numbering skips).

// Smi-tag an int32 in place (the tag instruction itself is missing here).
3978 void LCodeGen::DoSmiTag(LSmiTag* instr) {
3979 LOperand* input = instr->InputAt(0);
3980 ASSERT(input->IsRegister() && input->Equals(instr->result()));

// Smi-untag in place, optionally deoptimizing when the value is not a smi.
3986 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3987 LOperand* input = instr->InputAt(0);
3988 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3989 if (instr->needs_check()) {
// Tag bit set -> not a smi -> deopt.
3991 DeoptimizeIf(
not_zero, instr->environment());
3993 if (FLAG_debug_code) {

// Untag a tagged value into an XMM register: smi -> cvtsi2sd; heap number ->
// load the double; optionally map undefined to the canonical NaN, and
// optionally deopt on -0.
4001 void LCodeGen::EmitNumberUntagD(Register input_reg,
4003 XMMRegister result_reg,
4004 bool deoptimize_on_undefined,
4005 bool deoptimize_on_minus_zero,
4006 LEnvironment* env) {
4007 Label load_smi, done;
4010 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4014 factory()->heap_number_map());
4015 if (deoptimize_on_undefined) {
4019 __ j(
equal, &heap_number, Label::kNear);
// Non-heap-number: only undefined is tolerated (becomes canonical NaN).
4021 __ cmp(input_reg, factory()->undefined_value());
4025 ExternalReference nan =
4026 ExternalReference::address_of_canonical_non_hole_nan();
4027 __ movdbl(result_reg, Operand::StaticVariable(nan));
4028 __ jmp(&done, Label::kNear);
4030 __ bind(&heap_number);
4034 if (deoptimize_on_minus_zero) {
4035 XMMRegister xmm_scratch =
xmm0;
4036 __ xorps(xmm_scratch, xmm_scratch);
4037 __ ucomisd(result_reg, xmm_scratch);
// Value compares equal to zero: check the sign bit to detect -0.
4039 __ movmskpd(temp_reg, result_reg);
4040 __ test_b(temp_reg, 1);
4043 __ jmp(&done, Label::kNear);
// Smi case: untag, convert, re-tag (input register must be preserved).
4047 __ SmiUntag(input_reg);
4048 __ cvtsi2sd(result_reg, Operand(input_reg));
4049 __ SmiTag(input_reg);
// NOTE(review): extraction artifact — numbering skips throughout (4060 ->
// 4062, 4084 -> 4093, ...); surrounding control flow (labels, else branches,
// closing braces) is missing from this listing.

// Slow path of tagged->int32: handle heap numbers (and, when truncating,
// undefined -> 0); uses SSE3 fisttp when truncating, else cvttsd2si with
// exact-roundtrip and -0 deopt checks.
4054 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
4055 Label done, heap_number;
4056 Register input_reg =
ToRegister(instr->InputAt(0));
4060 factory()->heap_number_map());
4062 if (instr->truncating()) {
4063 __ j(
equal, &heap_number, Label::kNear);
// Truncating conversion maps undefined to 0; anything else deopts.
4066 __ cmp(input_reg, factory()->undefined_value());
4067 DeoptimizeIf(
not_equal, instr->environment());
4068 __ mov(input_reg, 0);
4069 __ jmp(&done, Label::kNear);
4071 __ bind(&heap_number);
4073 CpuFeatures::Scope scope(
SSE3);
// Exponent screen: values too large for fisttp would saturate.
4081 const uint32_t kTooBigExponent =
4083 __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
4084 __ j(
less, &convert, Label::kNear);
// fisttp truncates toward zero regardless of the FPU rounding mode.
4093 __ fisttp_d(Operand(
esp, 0));
4094 __ mov(input_reg, Operand(
esp, 0));
4097 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
4099 __ cvttsd2si(input_reg, Operand(
xmm0));
// 0x80000000 is cvttsd2si's overflow indicator; verify it really was INT_MIN.
4100 __ cmp(input_reg, 0x80000000u);
4104 ExternalReference min_int = ExternalReference::address_of_min_int();
4105 __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
4106 __ ucomisd(xmm_temp,
xmm0);
4107 DeoptimizeIf(
not_equal, instr->environment());
4112 DeoptimizeIf(
not_equal, instr->environment());
4114 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
4116 __ cvttsd2si(input_reg, Operand(
xmm0));
// Non-truncating: round-trip back to double and deopt on any mismatch.
4117 __ cvtsi2sd(xmm_temp, Operand(input_reg));
4118 __ ucomisd(
xmm0, xmm_temp);
4119 DeoptimizeIf(
not_equal, instr->environment());
// Result zero: inspect the source's sign bit to deopt on -0.
4122 __ test(input_reg, Operand(input_reg));
4124 __ movmskpd(input_reg,
xmm0);
4125 __ and_(input_reg, 1);
4126 DeoptimizeIf(
not_zero, instr->environment());

// Tagged->int32: smi fast path inline, heap numbers via the deferred path.
4133 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4134 class DeferredTaggedToI:
public LDeferredCode {
4136 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
4137 : LDeferredCode(codegen), instr_(instr) { }
4138 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
4139 virtual LInstruction* instr() {
return instr_; }
4144 LOperand* input = instr->InputAt(0);
4145 ASSERT(input->IsRegister());
4146 ASSERT(input->Equals(instr->result()));
4150 DeferredTaggedToI* deferred =
new(zone()) DeferredTaggedToI(
this, instr);
4153 __ JumpIfNotSmi(input_reg, deferred->entry());
4156 __ SmiUntag(input_reg);
4158 __ bind(deferred->exit());

// Tagged -> double: delegates to EmitNumberUntagD with the hydrogen
// instruction's undefined/-0 deopt policies.
4162 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4163 LOperand* input = instr->InputAt(0);
4164 ASSERT(input->IsRegister());
4165 LOperand* temp = instr->TempAt(0);
4167 LOperand* result = instr->result();
4168 ASSERT(result->IsDoubleRegister());
4171 XMMRegister result_reg = ToDoubleRegister(result);
4173 bool deoptimize_on_minus_zero =
4174 instr->hydrogen()->deoptimize_on_minus_zero();
4177 EmitNumberUntagD(input_reg,
4180 instr->hydrogen()->deoptimize_on_undefined(),
4181 deoptimize_on_minus_zero,
4182 instr->environment());
// NOTE(review): extraction artifact — many lines are missing (4199 -> 4202,
// 4235 -> 4247, ...); the manual bit-twiddling fallback below is only a
// fragment of the original routine.

// Double -> int32. Truncating: cvttsd2si, then SSE3 fisttp or a manual
// exponent/mantissa decomposition when the fast conversion overflows.
// Non-truncating: cvttsd2si plus exact-roundtrip and -0 deopt checks.
4186 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4187 LOperand* input = instr->InputAt(0);
4188 ASSERT(input->IsDoubleRegister());
4189 LOperand* result = instr->result();
4190 ASSERT(result->IsRegister());
4192 XMMRegister input_reg = ToDoubleRegister(input);
4195 if (instr->truncating()) {
4198 __ cvttsd2si(result_reg, Operand(input_reg));
// 0x80000000 signals cvttsd2si overflow -> take the slow conversion.
4199 __ cmp(result_reg, 0x80000000u);
4202 CpuFeatures::Scope scope(
SSE3);
4203 Label convert, done;
4206 __ movdbl(Operand(
esp, 0), input_reg);
// Exponent screen before fisttp, as in DoDeferredTaggedToI.
4210 const uint32_t kTooBigExponent =
4212 __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
4213 __ j(
less, &convert, Label::kNear);
4218 __ fld_d(Operand(
esp, 0));
4219 __ fisttp_d(Operand(
esp, 0));
4220 __ mov(result_reg, Operand(
esp, 0));
// No-SSE3 fallback: extract the high word (sign/exponent/mantissa-high).
4225 Register temp_reg =
ToRegister(instr->TempAt(0));
4226 XMMRegister xmm_scratch =
xmm0;
// pshufd with imm 1 moves the upper 32 bits of the double into lane 0.
4233 __ pshufd(xmm_scratch, input_reg, 1);
4234 __ movd(Operand(temp_reg), xmm_scratch);
4235 __ mov(result_reg, temp_reg);
4247 __ sub(Operand(result_reg),
// Exponent too large to represent in int32 -> deopt.
4253 DeoptimizeIf(
greater, instr->environment());
// Set the implicit mantissa bit via the -0 bit pattern, then shift the
// mantissa into place and apply the sign (xor/sub two's-complement trick).
4259 ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
4262 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
4264 __ por(input_reg, xmm_scratch);
4268 __ movd(xmm_scratch, Operand(result_reg));
4271 __ psrlq(input_reg, xmm_scratch);
4272 __ movd(Operand(result_reg), input_reg);
4275 __ xor_(result_reg, Operand(temp_reg));
4276 __ sub(result_reg, Operand(temp_reg));
// Non-truncating path: deopt unless the conversion round-trips exactly.
4281 __ cvttsd2si(result_reg, Operand(input_reg));
4282 __ cvtsi2sd(
xmm0, Operand(result_reg));
4283 __ ucomisd(
xmm0, input_reg);
4284 DeoptimizeIf(
not_equal, instr->environment());
// Zero result: deopt if the input was -0 (sign bit set).
4289 __ test(result_reg, Operand(result_reg));
4291 __ movmskpd(result_reg, input_reg);
4295 __ and_(result_reg, 1);
4296 DeoptimizeIf(
not_zero, instr->environment());
// NOTE(review): extraction artifact — the tag-bit test instructions that
// precede the DeoptimizeIf calls were dropped (numbering skips 4304 -> 4306).

// Deopt if the value is not a smi.
4303 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4304 LOperand* input = instr->InputAt(0);
4306 DeoptimizeIf(
not_zero, instr->environment());

// Deopt if the value IS a smi.
4310 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4311 LOperand* input = instr->InputAt(0);
4313 DeoptimizeIf(zero, instr->environment());

// Deopt unless the object's instance type is in the hydrogen-specified
// interval, or matches the mask/tag pair.
4317 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4318 Register input =
ToRegister(instr->InputAt(0));
4319 Register temp =
ToRegister(instr->TempAt(0));
4323 if (instr->hydrogen()->is_interval_check()) {
4326 instr->hydrogen()->GetCheckInterval(&first, &last);
4329 static_cast<int8_t>(first));
4332 if (first == last) {
// Degenerate interval: exact type equality.
4333 DeoptimizeIf(
not_equal, instr->environment());
4335 DeoptimizeIf(
below, instr->environment());
4339 static_cast<int8_t>(last));
4340 DeoptimizeIf(
above, instr->environment());
4346 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
// tag == 0 lets us use test-and-branch without the compare.
4351 DeoptimizeIf(tag == 0 ?
not_zero : zero, instr->environment());
4354 __ and_(temp, mask);
4356 DeoptimizeIf(
not_equal, instr->environment());

// Deopt unless the value is the expected JSFunction; new-space targets are
// compared through a global property cell so the GC can move them.
4362 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4363 Handle<JSFunction> target = instr->hydrogen()->target();
4364 if (isolate()->heap()->InNewSpace(*target)) {
4366 Handle<JSGlobalPropertyCell> cell =
4367 isolate()->factory()->NewJSGlobalPropertyCell(target);
4368 __ cmp(reg, Operand::Cell(cell));
4370 Operand operand = ToOperand(instr->value());
4371 __ cmp(operand, target);
4373 DeoptimizeIf(
not_equal, instr->environment());

// Shared map-check helper: compare the object's map and deopt on mismatch
// (the deopt call itself was dropped by extraction).
4377 void LCodeGen::DoCheckMapCommon(Register reg,
4380 LEnvironment* env) {
4382 __ CompareMap(reg, map, &success, mode);
// NOTE(review): extraction artifact — loop bodies and several compare/bind
// lines are missing in this span (numbering skips 4396 -> 4400, 4443 -> 4447).

// Deopt unless the object's map is one of the hydrogen map set; the last map
// is checked with a deopting compare, earlier ones branch to success.
4388 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4389 LOperand* input = instr->InputAt(0);
4390 ASSERT(input->IsRegister());
4394 SmallMapList* map_set = instr->hydrogen()->map_set();
4395 for (
int i = 0; i < map_set->length() - 1; i++) {
4396 Handle<Map> map = map_set->at(i);
4400 Handle<Map> map = map_set->last();

// Clamp a double to [0, 255] for pixel/typed-array stores.
4406 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4407 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4408 Register result_reg =
ToRegister(instr->result());
4409 __ ClampDoubleToUint8(value_reg,
xmm0, result_reg);

// Clamp an int32 (in place) to [0, 255].
4413 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4414 ASSERT(instr->unclamped()->Equals(instr->result()));
4415 Register value_reg =
ToRegister(instr->result());
4416 __ ClampUint8(value_reg);

// Clamp a tagged value to [0, 255]: smi -> integer clamp; heap number ->
// double clamp; undefined -> 0; anything else deopts.
4420 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4421 ASSERT(instr->unclamped()->Equals(instr->result()));
4422 Register input_reg =
ToRegister(instr->unclamped());
4423 Label is_smi, done, heap_number;
4425 __ JumpIfSmi(input_reg, &is_smi);
4429 factory()->heap_number_map());
4430 __ j(
equal, &heap_number, Label::kNear);
// Not a heap number: undefined clamps to zero, everything else deopts.
4434 __ cmp(input_reg, factory()->undefined_value());
4435 DeoptimizeIf(
not_equal, instr->environment());
4436 __ mov(input_reg, 0);
4437 __ jmp(&done, Label::kNear);
4440 __ bind(&heap_number);
4442 __ ClampDoubleToUint8(
xmm0,
xmm1, input_reg);
4443 __ jmp(&done, Label::kNear);
4447 __ SmiUntag(input_reg);
4448 __ ClampUint8(input_reg);

// Walk the prototype chain from instr->prototype() to the holder, deopting
// via DoCheckMapCommon if any map along the way differs.
4454 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4457 Handle<JSObject> holder = instr->holder();
4458 Handle<JSObject> current_prototype = instr->prototype();
4461 __ LoadHeapObject(reg, current_prototype);
4464 while (!current_prototype.is_identical_to(holder)) {
4465 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4469 Handle<JSObject>(
JSObject::cast(current_prototype->GetPrototype()));
4471 __ LoadHeapObject(reg, current_prototype);
// Final check on the holder itself.
4475 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
// NOTE(review): extraction artifact — allocation arguments, several cmp lines
// feeding the Asserts, and the property-initialization loop body are missing
// (numbering skips 4507 -> 4514, 4527 -> 4531, 4551 -> 4559).

// Inline-allocate a JSObject for a known constructor; falls back to the
// deferred runtime path when new-space allocation fails.
4480 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4481 class DeferredAllocateObject:
public LDeferredCode {
4483 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4484 : LDeferredCode(codegen), instr_(instr) { }
4485 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4486 virtual LInstruction* instr() {
return instr_; }
4488 LAllocateObject* instr_;
4491 DeferredAllocateObject* deferred =
4492 new(zone()) DeferredAllocateObject(
this, instr);
4494 Register result =
ToRegister(instr->result());
4495 Register scratch =
ToRegister(instr->TempAt(0));
4496 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4497 Handle<Map> initial_map(constructor->initial_map());
4498 int instance_size = initial_map->instance_size();
// Slack tracking must be finished: all in-object slots are pre-allocated.
4499 ASSERT(initial_map->pre_allocated_property_fields() +
4500 initial_map->unused_property_fields() -
4501 initial_map->inobject_properties() == 0);
4506 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
4507 __ AllocateInNewSpace(instance_size,
4514 __ bind(deferred->exit());
4515 if (FLAG_debug_code) {
4516 Label is_in_new_space;
4517 __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
4518 __ Abort(
"Allocated object is not in new-space");
4519 __ bind(&is_in_new_space);
4523 Register map = scratch;
4524 __ LoadHeapObject(scratch, constructor);
// Debug-only sanity checks that the runtime map still matches what this
// code was specialized for.
4527 if (FLAG_debug_code) {
4531 __ Assert(
equal,
"Unexpected instance size");
4533 initial_map->pre_allocated_property_fields());
4534 __ Assert(
equal,
"Unexpected pre-allocated property fields count");
4536 initial_map->unused_property_fields());
4537 __ Assert(
equal,
"Unexpected unused property fields count");
4539 initial_map->inobject_properties());
4540 __ Assert(
equal,
"Unexpected in-object property fields count");
// Fresh object: empty properties/elements, undefined in-object slots.
4546 __ mov(scratch, factory()->empty_fixed_array());
4549 if (initial_map->inobject_properties() != 0) {
4550 __ mov(scratch, factory()->undefined_value());
4551 for (
int i = 0; i < initial_map->inobject_properties(); i++) {

// Slow path: allocate via Runtime::kAllocateInNewSpace under a safepoint.
4559 void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4560 Register result =
ToRegister(instr->result());
4561 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4562 Handle<Map> initial_map(constructor->initial_map());
4563 int instance_size = initial_map->instance_size();
// GC-safe placeholder in the result slot during the runtime call.
4568 __ Set(result, Immediate(0));
4570 PushSafepointRegistersScope scope(
this);
4572 CallRuntimeFromDeferred(
4573 Runtime::kAllocateInNewSpace, 1, instr, instr->context());
4574 __ StoreToSafepointRegisterSlot(result,
eax);
// NOTE(review): extraction artifact — the boilerplate map check, the pushes of
// the literals array, and the clone-mode selection lines are missing
// (numbering skips 4589 -> 4597, 4611 -> 4614).

// Materialize an array literal: shallow-clone via FastCloneShallowArrayStub
// when the boilerplate's elements kind still matches, otherwise call the
// runtime (deep literals use kCreateArrayLiteral).
4578 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4580 Heap* heap = isolate()->heap();
4582 instr->hydrogen()->boilerplate_elements_kind();
4588 boilerplate_elements_kind,
true)) {
4589 __ LoadHeapObject(
eax, instr->hydrogen()->boilerplate_object());
// Boilerplate elements kind changed since compile time -> deopt.
4597 DeoptimizeIf(
not_equal, instr->environment());
4603 __ push(Immediate(
Smi::FromInt(instr->hydrogen()->literal_index())));
4606 __ push(Immediate(Handle<FixedArray>(heap->empty_fixed_array())));
4609 int length = instr->hydrogen()->length();
4610 if (instr->hydrogen()->IsCopyOnWrite()) {
4611 ASSERT(instr->hydrogen()->depth() == 1);
4614 FastCloneShallowArrayStub stub(mode, length);
4615 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4616 }
else if (instr->hydrogen()->depth() > 1) {
4617 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4619 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4625 FastCloneShallowArrayStub stub(mode, length);
4626 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// NOTE(review): extraction artifact — parameter list, field copy loops and
// several mov/cmp lines are missing (numbering skips 4631 -> 4638,
// 4644 -> 4648, 4687 -> 4697); offsets are tracked through *offset but the
// store instructions using them were largely dropped.

// Recursively copy a boilerplate object (and its elements) into a
// pre-allocated flat buffer at result+*offset, fixing up pointers to nested
// objects as offsets into the same buffer.
4631 void LCodeGen::EmitDeepCopy(Handle<JSObject>
object,
// Debug-only checks that the runtime boilerplate still matches what this
// specialized copy was generated from.
4638 if (FLAG_debug_code) {
4639 __ LoadHeapObject(
ecx,
object);
4640 __ cmp(source,
ecx);
4641 __ Assert(
equal,
"Unexpected object literal boilerplate");
4643 __ cmp(
ecx, Handle<Map>(object->map()));
4644 __ Assert(
equal,
"Unexpected boilerplate map");
4648 __ Assert(
equal,
"Unexpected boilerplate elements kind");
// COW elements are shared, not copied.
4652 Handle<FixedArrayBase> elements(object->elements());
4653 bool has_elements = elements->length() > 0 &&
4654 elements->map() != isolate()->heap()->fixed_cow_array_map();
// Layout: [object at object_offset][elements at elements_offset][children...]
4658 int object_offset = *offset;
4659 int object_size =
object->map()->instance_size();
4660 int elements_offset = *offset + object_size;
4661 int elements_size = has_elements ? elements->Size() : 0;
4662 *offset += object_size + elements_size;
4665 ASSERT(object->properties()->length() == 0);
4666 int inobject_properties =
object->map()->inobject_properties();
4667 int header_size = object_size - inobject_properties *
kPointerSize;
4670 __ lea(
ecx, Operand(result, elements_offset));
// Copy in-object properties; nested JSObjects are copied recursively and
// referenced by their buffer offset.
4678 for (
int i = 0; i < inobject_properties; i++) {
4679 int total_offset = object_offset +
object->GetInObjectPropertyOffset(i);
4680 Handle<Object> value = Handle<Object>(
object->InObjectPropertyAt(i));
4681 if (value->IsJSObject()) {
4683 __ lea(
ecx, Operand(result, *offset));
4685 __ LoadHeapObject(source, value_object);
4686 EmitDeepCopy(value_object, result, source, offset);
4687 }
else if (value->IsHeapObject()) {
4688 __ LoadHeapObject(
ecx, Handle<HeapObject>::cast(value));
// Copy the elements backing store, if any.
4697 __ LoadHeapObject(source, elements);
4704 int elements_length = elements->length();
4705 if (elements->IsFixedDoubleArray()) {
4706 Handle<FixedDoubleArray> double_array =
// Doubles are emitted as two 32-bit immediates (ia32 has no 64-bit mov
// immediate); this also preserves NaN/hole bit patterns exactly.
4708 for (
int i = 0; i < elements_length; i++) {
4709 int64_t value = double_array->get_representation(i);
4710 int32_t value_low = value & 0xFFFFFFFF;
4711 int32_t value_high = value >> 32;
4714 __ mov(
FieldOperand(result, total_offset), Immediate(value_low));
4715 __ mov(
FieldOperand(result, total_offset + 4), Immediate(value_high));
4717 }
else if (elements->IsFixedArray()) {
4719 for (
int i = 0; i < elements_length; i++) {
4721 Handle<Object> value(fast_elements->get(i));
4722 if (value->IsJSObject()) {
4724 __ lea(
ecx, Operand(result, *offset));
4726 __ LoadHeapObject(source, value_object);
4727 EmitDeepCopy(value_object, result, source, offset);
4728 }
else if (value->IsHeapObject()) {
4729 __ LoadHeapObject(
ecx, Handle<HeapObject>::cast(value));
// NOTE(review): extraction artifact — the allocation attempt, the offset
// declaration, and the flag constants are missing in this span (numbering
// skips 4753 -> 4761, 4792 -> 4795).

// Materialize a "fast" literal: allocate one flat buffer and deep-copy the
// boilerplate into it via EmitDeepCopy; deopts if the boilerplate's elements
// kind changed since compile time.
4742 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4744 int size = instr->hydrogen()->total_size();
4746 instr->hydrogen()->boilerplate()->GetElementsKind();
4752 boilerplate_elements_kind,
true)) {
4753 __ LoadHeapObject(
ebx, instr->hydrogen()->boilerplate());
4761 DeoptimizeIf(
not_equal, instr->environment());
4766 Label allocated, runtime_allocate;
4770 __ bind(&runtime_allocate);
4772 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4774 __ bind(&allocated);
4776 __ LoadHeapObject(
ebx, instr->hydrogen()->boilerplate());
4777 EmitDeepCopy(instr->hydrogen()->boilerplate(),
eax,
ebx, &offset);

// Materialize an object literal: push literals array, index, constant
// properties and flags, then clone via stub (shallow) or runtime (deep).
4782 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4784 Handle<FixedArray> literals(instr->environment()->closure()->literals());
4785 Handle<FixedArray> constant_properties =
4786 instr->hydrogen()->constant_properties();
4789 __ PushHeapObject(literals);
4790 __ push(Immediate(
Smi::FromInt(instr->hydrogen()->literal_index())));
4791 __ push(Immediate(constant_properties));
4792 int flags = instr->hydrogen()->fast_elements()
4795 flags |= instr->hydrogen()->has_function()
// constant_properties stores key/value pairs, hence the division by 2.
4801 int properties_count = constant_properties->length() / 2;
4802 if (instr->hydrogen()->depth() > 1) {
4803 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
4806 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
4808 FastCloneShallowObjectStub stub(properties_count);
4809 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

// Convert an object to fast-properties mode via the runtime.
4814 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4817 CallRuntime(Runtime::kToFastProperties, 1, instr);

// Materialize a regexp literal: lazily create it via the runtime on first
// use, then clone the materialized regexp object.
4821 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4832 int literal_offset = FixedArray::kHeaderSize +
// undefined literal slot => not yet materialized; call the runtime.
4835 __ cmp(
ebx, factory()->undefined_value());
4841 __ push(Immediate(
Smi::FromInt(instr->hydrogen()->literal_index())));
4842 __ push(Immediate(instr->hydrogen()->pattern()));
4843 __ push(Immediate(instr->hydrogen()->flags()));
4844 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
4847 __ bind(&materialized);
4849 Label allocated, runtime_allocate;
4853 __ bind(&runtime_allocate);
4856 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4859 __ bind(&allocated);
// Odd word-count objects need the trailing word copied separately.
4868 if ((size % (2 * kPointerSize)) != 0) {

// Materialize a function closure: fast stub for non-pretenured functions
// with no literals, otherwise Runtime::kNewClosure.
4875 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4879 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
4880 bool pretenure = instr->hydrogen()->pretenure();
4881 if (!pretenure && shared_info->num_literals() == 0) {
4882 FastNewClosureStub stub(shared_info->language_mode());
4883 __ push(Immediate(shared_info));
4884 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4887 __ push(Immediate(shared_info));
4888 __ push(Immediate(pretenure
4889 ? factory()->true_value()
4890 : factory()->false_value()));
4891 CallRuntime(Runtime::kNewClosure, 3, instr);
// NOTE(review): extraction artifact — several cmp/branch pairs inside
// EmitTypeofIs are missing (numbering skips 4930 -> 4935, 4950 -> 4957);
// InputAt(1) in DoTypeof looks suspicious (operand 0 is usually the context)
// but cannot be verified from this listing.

// typeof: push the operand and call the runtime.
4896 void LCodeGen::DoTypeof(LTypeof* instr) {
4897 LOperand* input = instr->InputAt(1);
4898 EmitPushTaggedOperand(input);
4899 CallRuntime(Runtime::kTypeof, 1, instr);

// Branch on `typeof x == "literal"` without materializing the string.
4903 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4904 Register input =
ToRegister(instr->InputAt(0));
4905 int true_block = chunk_->LookupDestination(instr->true_block_id());
4906 int false_block = chunk_->LookupDestination(instr->false_block_id());
4907 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4908 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4911 EmitTypeofIs(true_label, false_label, input, instr->type_literal());
4913 EmitBranch(true_block, false_block, final_branch_condition);

// Emit the compare sequence for one typeof literal; returns the condition
// under which the "true" branch should be taken.
4918 Condition LCodeGen::EmitTypeofIs(Label* true_label,
4921 Handle<String> type_name) {
4923 if (type_name->Equals(heap()->number_symbol())) {
// Smis and heap numbers are both "number".
4924 __ JumpIfSmi(input, true_label);
4926 factory()->heap_number_map());
4927 final_branch_condition =
equal;
4929 }
else if (type_name->Equals(heap()->string_symbol())) {
4930 __ JumpIfSmi(input, false_label);
4935 final_branch_condition =
zero;
4937 }
else if (type_name->Equals(heap()->boolean_symbol())) {
4938 __ cmp(input, factory()->true_value());
4940 __ cmp(input, factory()->false_value());
4941 final_branch_condition =
equal;
// "null" only answers typeof under the harmony-typeof flag.
4943 }
else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
4944 __ cmp(input, factory()->null_value());
4945 final_branch_condition =
equal;
4947 }
else if (type_name->Equals(heap()->undefined_symbol())) {
4948 __ cmp(input, factory()->undefined_value());
4950 __ JumpIfSmi(input, false_label);
4957 }
else if (type_name->Equals(heap()->function_symbol())) {
4959 __ JumpIfSmi(input, false_label);
4963 final_branch_condition =
equal;
4965 }
else if (type_name->Equals(heap()->object_symbol())) {
4966 __ JumpIfSmi(input, false_label);
// Classic typeof: null is "object"; harmony-typeof makes it "null".
4967 if (!FLAG_harmony_typeof) {
4968 __ cmp(input, factory()->null_value());
4978 final_branch_condition =
zero;
// Unknown literal: never true.
4981 __ jmp(false_label);
4983 return final_branch_condition;

// Branch on whether the current (or adaptor) frame is a construct frame.
4987 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4988 Register temp =
ToRegister(instr->TempAt(0));
4989 int true_block = chunk_->LookupDestination(instr->true_block_id());
4990 int false_block = chunk_->LookupDestination(instr->false_block_id());
4992 EmitIsConstructCall(temp);
4993 EmitBranch(true_block, false_block,
equal);

// Leaves the flags set for an `equal` test against the CONSTRUCT frame
// marker; skips over an arguments-adaptor frame first if present.
4997 void LCodeGen::EmitIsConstructCall(Register temp) {
5002 Label check_frame_marker;
5005 __ j(
not_equal, &check_frame_marker, Label::kNear);
5009 __ bind(&check_frame_marker);
// Pad with nops so the lazy-deopt call patch never overwrites the previous
// patch site (patch_size itself was dropped by extraction).
5015 void LCodeGen::EnsureSpaceForLazyDeopt() {
5018 int current_pc = masm()->pc_offset();
5020 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
5021 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
5022 __ Nop(padding_size);
5024 last_lazy_deopt_pc_ = masm()->pc_offset();

// Register this point as a lazy-deopt site.
5028 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5029 EnsureSpaceForLazyDeopt();
5030 ASSERT(instr->HasEnvironment());
5031 LEnvironment* env = instr->environment();
5032 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5033 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());

// Unconditional deopt (body dropped by extraction).
5037 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {

// delete obj[key] via the DELETE builtin, with a lazy-deopt safepoint
// recorded through SafepointGenerator.
5042 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
5043 LOperand* obj = instr->object();
5044 LOperand* key = instr->key();
5045 __ push(ToOperand(obj));
5046 EmitPushTaggedOperand(key);
5047 ASSERT(instr->HasPointerMap());
5048 LPointerMap* pointers = instr->pointer_map();
5049 RecordPosition(pointers->position());
5053 SafepointGenerator safepoint_generator(
5054 this, pointers, Safepoint::kLazyDeopt);
5056 __ InvokeBuiltin(Builtins::DELETE,
CALL_FUNCTION, safepoint_generator);

// Deferred interrupt check: call Runtime::kStackGuard under a safepoint.
5060 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5061 PushSafepointRegistersScope scope(
this);
5063 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5064 RecordSafepointWithLazyDeopt(
5065 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
5066 ASSERT(instr->HasEnvironment());
5067 LEnvironment* env = instr->environment();
5068 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Stack-limit check. Function entry: compare esp against the stack limit and
// call StackCheckStub when exceeded. Backwards branch: jump to a deferred
// runtime stack-guard call instead.
5072 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5073 class DeferredStackCheck:
public LDeferredCode {
5075 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5076 : LDeferredCode(codegen), instr_(instr) { }
5077 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
5078 virtual LInstruction* instr() {
return instr_; }
5080 LStackCheck* instr_;
5083 ASSERT(instr->HasEnvironment());
5084 LEnvironment* env = instr->environment();
5087 if (instr->hydrogen()->is_function_entry()) {
5090 ExternalReference stack_limit =
5091 ExternalReference::address_of_stack_limit(isolate());
5092 __ cmp(
esp, Operand::StaticVariable(stack_limit));
5095 ASSERT(instr->context()->IsRegister());
5097 StackCheckStub stub;
5098 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5099 EnsureSpaceForLazyDeopt();
5101 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5102 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5104 ASSERT(instr->hydrogen()->is_backwards_branch());
5106 DeferredStackCheck* deferred_stack_check =
5107 new(zone()) DeferredStackCheck(
this, instr);
5108 ExternalReference stack_limit =
5109 ExternalReference::address_of_stack_limit(isolate());
5110 __ cmp(
esp, Operand::StaticVariable(stack_limit));
5111 __ j(
below, deferred_stack_check->entry());
5112 EnsureSpaceForLazyDeopt();
// Reuse the instruction's done_label as the deferred code's return point.
5113 __ bind(instr->done_label());
5114 deferred_stack_check->SetExit(instr->done_label());
5115 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

// On-stack-replacement entry: record the environment and the OSR pc offset;
// emits no code (the frame was already built by the unoptimized code).
5123 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5127 LEnvironment* environment = instr->environment();
5128 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
5129 instr->SpilledDoubleRegisterArray());
5133 ASSERT(!environment->HasBeenRegistered());
5134 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5135 ASSERT(osr_pc_offset_ == -1);
5136 osr_pc_offset_ = masm()->pc_offset();
// NOTE(review): extraction artifact — the InvokeBuiltin(IN) call, several
// loads/tests feeding the DeoptimizeIf calls, and the ends of the last two
// functions are missing (numbering skips 5149 -> 5154, 5227 -> 5236).

// `key in obj` via the IN builtin with a lazy-deopt safepoint.
5140 void LCodeGen::DoIn(LIn* instr) {
5141 LOperand* obj = instr->object();
5142 LOperand* key = instr->key();
5143 EmitPushTaggedOperand(key);
5144 EmitPushTaggedOperand(obj);
5145 ASSERT(instr->HasPointerMap());
5146 LPointerMap* pointers = instr->pointer_map();
5147 RecordPosition(pointers->position());
5148 SafepointGenerator safepoint_generator(
5149 this, pointers, Safepoint::kLazyDeopt);

// for-in preparation: deopt on null/undefined/non-object receivers, use the
// enum cache when available, else call the runtime for the property names.
5154 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5155 __ cmp(
eax, isolate()->factory()->undefined_value());
5156 DeoptimizeIf(
equal, instr->environment());
5158 __ cmp(
eax, isolate()->factory()->null_value());
5159 DeoptimizeIf(
equal, instr->environment());
// Smi receiver -> deopt (test instruction dropped by extraction).
5162 DeoptimizeIf(zero, instr->environment());
5168 Label use_cache, call_runtime;
5169 __ CheckEnumCache(&call_runtime);
5172 __ jmp(&use_cache, Label::kNear);
5175 __ bind(&call_runtime);
5177 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
// Runtime must have returned a map (meta_map-mapped) holding the cache.
5180 isolate()->factory()->meta_map());
5181 DeoptimizeIf(
not_equal, instr->environment());
5182 __ bind(&use_cache);

// Load the enum cache array from a map's instance descriptors; deopt if it
// is empty.
5186 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5188 Register result =
ToRegister(instr->result());
5189 __ LoadInstanceDescriptors(map, result);
5194 __ test(result, result);
5195 DeoptimizeIf(
equal, instr->environment());

// Deopt if the object's map no longer matches the cached for-in map.
5199 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5200 Register
object =
ToRegister(instr->value());
5203 DeoptimizeIf(
not_equal, instr->environment());

// Load a property by descriptor index: negative index -> backing-store
// ("out of object") properties array, else in-object slot.
5207 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5208 Register
object =
ToRegister(instr->object());
5211 Label out_of_object, done;
5212 __ cmp(index, Immediate(0));
5213 __ j(
less, &out_of_object);
5218 __ jmp(&done, Label::kNear);
5220 __ bind(&out_of_object);
5227 FixedArray::kHeaderSize - kPointerSize));
5236 #endif // V8_TARGET_ARCH_IA32
static const int kCallerFPOffset
static const int kElementsKindMask
static const int kLengthOffset
static const int kBitFieldOffset
static LGap * cast(LInstruction *instr)
const intptr_t kSmiTagMask
static const int kCodeEntryOffset
static const int kMaxAsciiCharCode
static const int kPrototypeOrInitialMapOffset
const char * ToCString(const v8::String::Utf8Value &value)
static int SlotOffset(int index)
virtual void AfterCall() const
void PrintF(const char *format,...)
static const uint32_t kExponentMask
static Smi * FromInt(int value)
bool IsFastObjectElementsKind(ElementsKind kind)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
static HeapObject * cast(Object *obj)
static Handle< T > cast(Handle< S > that)
static const int kGlobalReceiverOffset
static const int kNativeByteOffset
static const int kExponentBias
static const int kNoNumber
static XMMRegister FromAllocationIndex(int index)
static bool IsSupported(CpuFeature f)
static const int kStrictModeBitWithinByte
const int kNoAlignmentPadding
static const int kExternalPointerOffset
virtual ~SafepointGenerator()
static const int kCallerSPOffset
#define ASSERT(condition)
bool CanTransitionToMoreGeneralFastElementsKind(ElementsKind elements_kind, bool allow_only_packed)
#define ASSERT_GE(v1, v2)
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static const int kInObjectFieldCount
static const int kStressDeoptCounterOffset
static const int kMaximumSlots
static const int kInstanceClassNameOffset
static const int kUnusedPropertyFieldsOffset
static const int kGlobalContextOffset
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
MemOperand ContextOperand(Register context, int index)
static const int kContextOffset
const int kAlignmentPaddingPushed
Handle< String > SubString(Handle< String > str, int start, int end, PretenureFlag pretenure)
static const int kHashFieldOffset
Condition ReverseCondition(Condition cond)
const uint32_t kSlotsZapValue
static const int kLiteralsOffset
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
static const int kExponentShift
static const int kValueOffset
const uint32_t kHoleNanUpper32
Operand FieldOperand(Register object, int offset)
static LConstantOperand * cast(LOperand *op)
const uint32_t kHoleNanLower32
static Register FromAllocationIndex(int index)
static const int kDynamicAlignmentStateOffset
static void VPrint(const char *format, va_list args)
static const int kCacheStampOffset
static const int kPropertiesOffset
const int kAlignmentZapValue
static const int kInObjectPropertiesOffset
bool IsFastSmiElementsKind(ElementsKind kind)
static int OffsetOfElementAt(int index)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static void EnsureRelocSpaceForLazyDeoptimization(Handle< Code > code)
static const int kElementsOffset
static const int kNativeBitWithinByte
static const int kContainsCachedArrayIndexMask
static Vector< T > New(int length)
int ElementsKindToShiftSize(ElementsKind elements_kind)
Vector< const char > CStrVector(const char *data)
static int OffsetOfElementAt(int index)
static const int kLengthOffset
static int SizeFor(int length)
static const int kHeaderSize
static const int kEnumerationIndexOffset
static const int kMapOffset
static const int kValueOffset
bool is(Register reg) const
static const int kLengthOffset
static Address GetDeoptimizationEntry(int id, BailoutType type)
static const int kHasNonInstancePrototype
static const int kContextOffset
static const int kFunctionOffset
ElementsKind GetInitialFastElementsKind()
static const uint32_t kSignMask
static const int kStrictModeByteOffset
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static const int kElementsKindShift
static const int kConstructorOffset
#define ASSERT_NE(v1, v2)
static const int kIsUndetectable
static const int kHeaderSize
static const int kMaximumClonedProperties
static const int kPrototypeOffset
#define RUNTIME_ENTRY(name, nargs, ressize)
static const int kMaxLength
static const int kValueOffset
static const int kMarkerOffset
static const int kExponentBits
static const int kSharedFunctionInfoOffset
Register ToRegister(int num)
static const int kMaxValue
static const int kMantissaBits
static const int kBitField2Offset
static HValue * cast(HValue *value)
static Handle< Code > GetUninitialized(Token::Value op)
static const int kMaximumClonedLength
static const int kExponentOffset
bool EvalComparison(Token::Value op, double op1, double op2)
static JSObject * cast(Object *obj)
bool IsFastDoubleElementsKind(ElementsKind kind)
SafepointGenerator(LCodeGen *codegen, LPointerMap *pointers, Safepoint::DeoptMode mode)
static const int kInstanceTypeOffset
virtual void BeforeCall(int call_size) const
static const int kPreAllocatedPropertyFieldsOffset
static const int kMantissaOffset