class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen), pointers_(pointers), deopt_mode_(mode) { }

  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};


#define __ masm()->


bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  CpuFeatures::Scope scope(FPU);

  CodeStub::GenerateFPStubs();

  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack-allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
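

// The prologue: replace the receiver with undefined for strict-mode and
// native calls, reserve the frame's stack slots (zapping them in debug
// builds), allocate a local context when the function has heap-allocated
// variables, and copy context-allocated parameters into it.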
bool LCodeGen::GeneratePrologue() {
#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver with
  // undefined when called as functions (without an explicit receiver
  // object). t1 is zero for method calls and non-zero for function calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ li(a0, Operand(slots));
      __ li(a2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(a2);
      __ Subu(a0, a0, 1);
      __ Branch(&loop, ne, a0, Operand(zero_reg));
    } else {
      __ Subu(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        __ lw(a0, MemOperand(fp, parameter_offset));
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);
        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  EnsureSpaceForLazyDeopt();
  return !is_aborted();
}
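

// GenerateBody walks the instruction list in order, skipping labels that
// were replaced during chunk building, and lowers each LInstruction to
// native MIPS code.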
bool LCodeGen::GenerateBody() {
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
      __ jmp(code->exit());
    }
  }
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateDeoptJumpTable() {
  Abort("Unimplemented: %s", "GenerateDeoptJumpTable");
  return false;
}


bool LCodeGen::GenerateSafepointTable() {
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
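

// Operand materialization helpers. Each takes an LOperand and produces a
// value in a register: registers pass through, constants become immediates
// (or heap-object loads), and stack slots are loaded through ToMemOperand.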
Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
    } else if (r.IsDouble()) {
      Abort("EmitLoadRegister: Unsupported double immediate.");
    } else {
      ASSERT(r.IsTagged());
      if (literal->IsSmi()) {
        __ li(scratch, Operand(literal));
      } else {
        __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
      }
    }
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ lw(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                FloatRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      // Convert the integer constant to a double via the FPU.
      __ li(at, Operand(static_cast<int32_t>(literal->Number())));
      __ mtc1(at, flt_scratch);
      __ cvt_d_w(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    MemOperand mem_op = ToMemOperand(op);
    __ ldc1(dbl_scratch, mem_op);
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> literal = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return literal;
}


bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  return value->Number();
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
  } else {
    return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
  }
}
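

// WriteTranslation records, for one environment in the inlining chain, how
// the deoptimizer can reconstruct the frame: it recurses into the outer
// environment first, opens the right frame kind, then emits one entry per
// environment value.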
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
  }
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // Currently only used for the arguments object, which must be
    // reconstructed by the deoptimizer.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
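

// Deoptimization support. An environment is registered at most once; its
// translation describes how to rebuild the unoptimized frame(s) at this
// point.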
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    // Count the number of frames and jsframes in the environment chain.
    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count, zone());
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment, zone());
  }
}
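

// DeoptimizeIf emits a conditional jump to the eager deoptimization entry
// for the instruction's environment; FLAG_deopt_every_n_times and
// FLAG_trap_on_deopt hook in here for testing.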
void LCodeGen::DeoptimizeIf(Condition cc,
                            LEnvironment* environment,
                            Register src1,
                            const Operand& src2) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on MIPS.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Call(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) {
    Label skip;
    if (cc != al) __ Branch(&skip, NegateCondition(cc), src1, src2);
    __ stop("trap_on_deopt");
    __ bind(&skip);
  }

  __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  // Populate the per-deoptimization-point entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal, zone());
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}
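

// Safepoint recording. Every call site records which stack slots (and,
// for calls with saved registers, which registers) hold tagged pointers,
// so the GC can walk optimized frames.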
void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer), zone());
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp, zone());
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
  RecordSafepoint(&empty_pointers, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
}


void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ lw(a0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
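

// DoModI: for a power-of-two divisor the remainder is computed with a
// bitwise AND, with the sign taken from the dividend. For example, with
// p2constant == 8, x % 8 == x & 7 for x >= 0 and -(-x & 7) for x < 0.
// The general path uses the div instruction and reads the remainder from
// the hi register.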
void LCodeGen::DoModI(LModI* instr) {
  Register scratch = scratch0();
  const Register left = ToRegister(instr->InputAt(0));
  const Register result = ToRegister(instr->result());

  Label done;

  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register scratch = scratch0();
    ASSERT(!left.is(scratch));
    __ mov(scratch, left);
    int32_t p2constant = HConstant::cast(
        instr->hydrogen()->right())->Integer32Value();
    ASSERT(p2constant != 0);
    // Result always takes the sign of the dividend (left).
    p2constant = abs(p2constant);

    Label positive_dividend;
    __ Branch(USE_DELAY_SLOT, &positive_dividend, ge, left, Operand(zero_reg));
    __ subu(result, zero_reg, left);  // In the delay slot.
    __ And(result, result, p2constant - 1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
    }
    __ Branch(USE_DELAY_SLOT, &done);
    __ subu(result, zero_reg, result);  // In the delay slot.
    __ bind(&positive_dividend);
    __ And(result, scratch, p2constant - 1);
  } else {
    // div runs in the background while we check for special cases.
    Register right = EmitLoadRegister(instr->InputAt(1), scratch);
    __ div(left, right);

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
    }

    __ Branch(USE_DELAY_SLOT, &done, ge, left, Operand(zero_reg));
    __ mfhi(result);  // In the delay slot.

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
    }
  }
  __ bind(&done);
}
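

// DoDivI deoptimizes on the three cases 32-bit integer division cannot
// represent faithfully: division by zero, 0 / -x (which would be -0), and
// kMinInt / -1 (which overflows). The final check deopts when the
// remainder is non-zero, i.e. when the result is not an integer.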
void LCodeGen::DoDivI(LDivI* instr) {
  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register result = ToRegister(instr->result());

  // On MIPS div is asynchronous - it will run in the background while we
  // check for special cases.
  __ div(left, right);

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ Branch(&left_not_zero, ne, left, Operand(zero_reg));
    DeoptimizeIf(lt, instr->environment(), right, Operand(zero_reg));
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ Branch(&left_not_min_int, ne, left, Operand(kMinInt));
    DeoptimizeIf(eq, instr->environment(), right, Operand(-1));
    __ bind(&left_not_min_int);
  }

  __ mfhi(result);
  DeoptimizeIf(ne, instr->environment(), result, Operand(zero_reg));
  __ mflo(result);
}
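

// DoMulI strength-reduces multiplications by constants that are a power
// of two, or one away from one: for example x * 4 becomes x << 2, and
// x * 5 becomes (x << 2) + x. Other constants fall back to Mul, and the
// variable case uses mult so the high word can be checked for overflow.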
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());
  // Note that result may alias left.
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right_op = instr->InputAt(1);

  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  bool bailout_on_minus_zero =
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);

  if (right_op->IsConstantOperand() && !can_overflow) {
    // Use optimized code for specific constants.
    int32_t constant = ToInteger32(LConstantOperand::cast(right_op));

    if (bailout_on_minus_zero && (constant < 0)) {
      // If the constant is negative and left is zero, the result would be -0.
      DeoptimizeIf(eq, instr->environment(), left, Operand(zero_reg));
    }

    switch (constant) {
      case -1:
        __ Subu(result, zero_reg, left);
        break;
      case 0:
        if (bailout_on_minus_zero) {
          // If left is strictly negative, the result is -0. Deoptimize if
          // required, otherwise return 0.
          DeoptimizeIf(lt, instr->environment(), left, Operand(zero_reg));
        }
        __ mov(result, zero_reg);
        break;
      case 1:
        __ Move(result, left);
        break;
      default: {
        // Multiplying by powers of two and powers of two plus or minus
        // one can be done faster with shifted operands.
        int32_t mask = constant >> 31;
        uint32_t constant_abs = (constant + mask) ^ mask;

        if (IsPowerOf2(constant_abs) ||
            IsPowerOf2(constant_abs - 1) ||
            IsPowerOf2(constant_abs + 1)) {
          if (IsPowerOf2(constant_abs)) {
            int32_t shift = WhichPowerOf2(constant_abs);
            __ sll(result, left, shift);
          } else if (IsPowerOf2(constant_abs - 1)) {
            int32_t shift = WhichPowerOf2(constant_abs - 1);
            __ sll(result, left, shift);
            __ Addu(result, result, left);
          } else {
            int32_t shift = WhichPowerOf2(constant_abs + 1);
            __ sll(result, left, shift);
            __ Subu(result, result, left);
          }
          // Correct the sign of the result if the constant is negative.
          if (constant < 0) {
            __ Subu(result, zero_reg, result);
          }
        } else {
          // Generate standard multiplication.
          __ li(at, constant);
          __ Mul(result, left, at);
        }
      }
    }
  } else {
    Register right = EmitLoadRegister(right_op, scratch);
    if (bailout_on_minus_zero) {
      __ Or(ToRegister(instr->TempAt(0)), left, right);
    }

    if (can_overflow) {
      // hi:lo = left * right.
      __ mult(left, right);
      __ mfhi(scratch);
      __ mflo(result);
      __ sra(at, result, 31);
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
    } else {
      __ Mul(result, left, right);
    }

    if (bailout_on_minus_zero) {
      // Bail out if the result is supposed to be negative zero.
      Label done;
      __ Branch(&done, ne, result, Operand(zero_reg));
      DeoptimizeIf(lt,
                   instr->environment(),
                   ToRegister(instr->TempAt(0)),
                   Operand(zero_reg));
      __ bind(&done);
    }
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left_op = instr->InputAt(0);
  LOperand* right_op = instr->InputAt(1);
  ASSERT(left_op->IsRegister());
  Register left = ToRegister(left_op);
  Register result = ToRegister(instr->result());
  Operand right(no_reg);

  if (right_op->IsStackSlot() || right_op->IsArgument()) {
    right = Operand(EmitLoadRegister(right_op, at));
  } else {
    ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
    right = ToOperand(right_op);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_OR:
      __ Or(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Xor(result, left, right);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (right_op->IsRegister()) {
    // No need to mask the right operand on MIPS, it is built into the
    // variable shift instructions.
    switch (instr->op()) {
      case Token::SAR:
        __ srav(result, left, ToRegister(right_op));
        break;
      case Token::SHR:
        __ srlv(result, left, ToRegister(right_op));
        if (instr->can_deopt()) {
          DeoptimizeIf(lt, instr->environment(), result, Operand(zero_reg));
        }
        break;
      case Token::SHL:
        __ sllv(result, left, ToRegister(right_op));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sra(result, left, shift_count);
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ srl(result, left, shift_count);
        } else {
          if (instr->can_deopt()) {
            __ And(at, left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ sll(result, left, shift_count);
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
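

// DoSubI and DoAddI share one pattern: without the overflow flag they use
// the plain Subu/Addu macros; with it they use the *AndCheckForOverflow
// macros, which leave a negative value in the overflow register when the
// operation wrapped, and then deoptimize on overflow < 0.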
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);

  if (!can_overflow) {
    if (right->IsStackSlot() || right->IsArgument()) {
      Register right_reg = EmitLoadRegister(right, at);
      __ Subu(ToRegister(result), ToRegister(left), Operand(right_reg));
    } else {
      ASSERT(right->IsRegister() || right->IsConstantOperand());
      __ Subu(ToRegister(result), ToRegister(left), ToOperand(right));
    }
  } else {  // can_overflow.
    Register overflow = scratch0();
    Register scratch = scratch1();
    if (right->IsStackSlot() ||
        right->IsArgument() ||
        right->IsConstantOperand()) {
      Register right_reg = EmitLoadRegister(right, scratch);
      __ SubuAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 right_reg,
                                 overflow);
    } else {
      ASSERT(right->IsRegister());
      __ SubuAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 ToRegister(right),
                                 overflow);
    }
    DeoptimizeIf(lt, instr->environment(),
                 overflow, Operand(zero_reg));
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ li(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DoubleRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ Move(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  Handle<Object> value = instr->value();
  if (value->IsSmi()) {
    __ li(ToRegister(instr->result()), Operand(value));
  } else {
    __ LoadHeapObject(ToRegister(instr->result()),
                      Handle<HeapObject>::cast(value));
  }
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ lw(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ lw(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}


void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result| and extract the elements kind from bit field 2.
  __ lw(result, FieldMemOperand(input, HeapObject::kMapOffset));
  __ lbu(result, FieldMemOperand(result, Map::kBitField2Offset));
  __ Ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi, return the object.
  __ Move(result, input);
  __ JumpIfSmi(input, &done);

  // If the object is not a value type, return the object.
  __ GetObjectType(input, map, map);
  __ Branch(&done, ne, map, Operand(JS_VALUE_TYPE));
  __ lw(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoDateField(LDateField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Smi* index = instr->index();
  Label runtime, done;
  ASSERT(!scratch.is(scratch0()));
  ASSERT(!scratch.is(object));

#ifdef DEBUG
  __ AbortIfSmi(object);
  __ GetObjectType(object, scratch, scratch);
  __ Assert(eq, "Trying to get date field from non-date.",
      scratch, Operand(JS_DATE_TYPE));
#endif

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch, Operand(stamp));
      __ lw(scratch, MemOperand(scratch));
      __ lw(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch, Operand(scratch0()));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ li(a1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ Nor(result, zero_reg, Operand(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);

  if (!can_overflow) {
    if (right->IsStackSlot() || right->IsArgument()) {
      Register right_reg = EmitLoadRegister(right, at);
      __ Addu(ToRegister(result), ToRegister(left), Operand(right_reg));
    } else {
      ASSERT(right->IsRegister() || right->IsConstantOperand());
      __ Addu(ToRegister(result), ToRegister(left), ToOperand(right));
    }
  } else {  // can_overflow.
    Register overflow = scratch0();
    Register scratch = scratch1();
    if (right->IsStackSlot() ||
        right->IsArgument() ||
        right->IsConstantOperand()) {
      Register right_reg = EmitLoadRegister(right, scratch);
      __ AdduAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 right_reg,
                                 overflow);
    } else {
      ASSERT(right->IsRegister());
      __ AdduAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 ToRegister(right),
                                 overflow);
    }
    DeoptimizeIf(lt, instr->environment(),
                 overflow, Operand(zero_reg));
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  DoubleRegister result = ToDoubleRegister(instr->result());
  switch (instr->op()) {
    case Token::ADD:
      __ add_d(result, left, right);
      break;
    case Token::SUB:
      __ sub_d(result, left, right);
      break;
    case Token::MUL:
      __ mul_d(result, left, right);
      break;
    case Token::DIV:
      __ div_d(result, left, right);
      break;
    case Token::MOD: {
      // Save a0-a3 on the stack.
      RegList saved_regs = a0.bit() | a1.bit() | a2.bit() | a3.bit();
      __ MultiPush(saved_regs);

      __ PrepareCallCFunction(0, 2, scratch0());
      __ SetCallCDoubleArguments(left, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          0, 2);
      __ GetCFunctionDoubleResult(result);

      // Restore saved registers.
      __ MultiPop(saved_regs);
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}
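

// EmitBranch avoids emitting a jump to the block that immediately follows
// in the emission order: if one target is the next block, only the branch
// to the other target is emitted (with the condition negated if needed).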
void LCodeGen::EmitBranch(int left_block, int right_block,
                          Condition cc, Register src1, const Operand& src2) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);
  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ Branch(chunk_->GetAssemblyLabel(right_block),
              NegateCondition(cc), src1, src2);
  } else if (right_block == next_block) {
    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
  } else {
    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
    __ Branch(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::EmitBranchF(int left_block, int right_block,
                           Condition cc, FPURegister src1, FPURegister src2) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);
  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
               NegateCondition(cc), src1, src2);
  } else if (right_block == next_block) {
    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
  } else {
    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
    __ Branch(chunk_->GetAssemblyLabel(right_block));
  }
}
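

// DoBranch implements ToBoolean branching. Known integer, double, boolean
// and smi inputs get a direct test; generic tagged values are dispatched
// on the types the instruction expects, deoptimizing on anything never
// seen before.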
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    EmitBranch(true_block, false_block, ne, reg, Operand(zero_reg));
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    // Test the double value. Zero and NaN are false.
    EmitBranchF(true_block, false_block, ne, reg, kDoubleRegZero);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ LoadRoot(at, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, eq, reg, Operand(at));
    } else if (type.IsSmi()) {
      EmitBranch(true_block, false_block, ne, reg, Operand(zero_reg));
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // Boolean -> its value.
        __ LoadRoot(at, Heap::kTrueValueRootIndex);
        __ Branch(true_label, eq, reg, Operand(at));
        __ LoadRoot(at, Heap::kFalseValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ LoadRoot(at, Heap::kNullValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ Branch(false_label, eq, reg, Operand(zero_reg));
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ And(at, reg, Operand(kSmiTagMask));
        DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
      }

      const Register map = scratch0();
      if (expected.NeedsMap()) {
        __ lw(map, FieldMemOperand(reg, HeapObject::kMapOffset));
        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
          __ And(at, at, Operand(1 << Map::kIsUndetectable));
          __ Branch(false_label, ne, at, Operand(zero_reg));
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // Spec object -> true.
        __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
        __ Branch(true_label, ge, at, Operand(FIRST_SPEC_OBJECT_TYPE));
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
        __ Branch(&not_string, ge, at, Operand(FIRST_NONSTRING_TYPE));
        __ lw(at, FieldMemOperand(reg, String::kLengthOffset));
        __ Branch(true_label, ne, at, Operand(zero_reg));
        __ Branch(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // Heap number -> false iff +0, -0, or NaN.
        DoubleRegister dbl_scratch = double_scratch0();
        Label not_heap_number;
        __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
        __ Branch(&not_heap_number, ne, map, Operand(at));
        __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
        __ BranchF(true_label, false_label, ne, dbl_scratch, kDoubleRegZero);
        // Falls through if dbl_scratch == 0.
        __ Branch(false_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
    }
  }
}


void LCodeGen::EmitGoto(int block) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    __ jmp(chunk_->GetAssemblyLabel(block));
  }
}


void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = kNoCondition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  Condition cond = TokenToCondition(instr->op(), false);

  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    // We can statically evaluate the comparison.
    double left_val = ToDouble(LConstantOperand::cast(left));
    double right_val = ToDouble(LConstantOperand::cast(right));
    int next_block =
        EvalComparison(instr->op(), left_val, right_val) ? true_block
                                                         : false_block;
    EmitGoto(next_block);
  } else {
    if (instr->is_double()) {
      FPURegister left_reg = ToDoubleRegister(left);
      FPURegister right_reg = ToDoubleRegister(right);

      // If a NaN is involved, i.e. the result is unordered,
      // jump to false block label.
      __ BranchF(NULL, chunk_->GetAssemblyLabel(false_block), eq,
                 left_reg, right_reg);

      EmitBranchF(true_block, false_block, cond, left_reg, right_reg);
    } else {
      Register cmp_left;
      Operand cmp_right = Operand(0);

      if (right->IsConstantOperand()) {
        cmp_left = ToRegister(left);
        cmp_right = Operand(ToInteger32(LConstantOperand::cast(right)));
      } else if (left->IsConstantOperand()) {
        cmp_left = ToRegister(right);
        cmp_right = Operand(ToInteger32(LConstantOperand::cast(left)));
        // We transposed the operands, so reverse the condition.
        cond = ReverseCondition(cond);
      } else {
        cmp_left = ToRegister(left);
        cmp_right = Operand(ToRegister(right));
      }

      EmitBranch(true_block, false_block, cond, cmp_left, cmp_right);
    }
  }
}


void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  EmitBranch(true_block, false_block, eq, left, Operand(right));
}


void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitBranch(true_block, false_block, eq, left,
             Operand(instr->hydrogen()->right()));
}


void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  // If the expression is known to be untagged or a smi, then it's definitely
  // not null, and it can't be an undetectable object.
  if (instr->hydrogen()->representation().IsSpecialization() ||
      instr->hydrogen()->type().IsSmi()) {
    EmitGoto(false_block);
    return;
  }

  int true_block = chunk_->LookupDestination(instr->true_block_id());

  Heap::RootListIndex nil_value = instr->nil() == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ LoadRoot(at, nil_value);
  if (instr->kind() == kStrictEquality) {
    EmitBranch(true_block, false_block, eq, reg, Operand(at));
  } else {
    Heap::RootListIndex other_nil_value = instr->nil() == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ Branch(true_label, eq, reg, Operand(at));
    __ LoadRoot(at, other_nil_value);
    __ Branch(true_label, eq, reg, Operand(at));
    __ JumpIfSmi(reg, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ And(scratch, scratch, 1 << Map::kIsUndetectable);
    EmitBranch(true_block, false_block, ne, scratch, Operand(zero_reg));
  }
}
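

// EmitIsObject jumps to one of the two labels for the easy cases (smi,
// null, undetectable) and otherwise falls through with the instance type
// in temp2, leaving the final range check to the caller's EmitBranch.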
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ Branch(is_object, eq, input, Operand(temp2));

  // Load map.
  __ lw(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable));
  __ Branch(is_not_object, ne, temp2, Operand(zero_reg));

  // Load instance type and check that it is in the object type range.
  __ lbu(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ Branch(is_not_object,
            lt, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));

  return le;
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond, temp2,
             Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
}


Condition LCodeGen::EmitIsString(Register input,
                                 Register temp1,
                                 Label* is_not_string) {
  __ JumpIfSmi(input, is_not_string);
  __ GetObjectType(input, temp1, temp1);

  return lt;
}


void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsString(reg, temp1, false_label);

  EmitBranch(true_block, false_block, true_cond, temp1,
             Operand(FIRST_NONSTRING_TYPE));
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
  __ And(at, input_reg, kSmiTagMask);
  EmitBranch(true_block, false_block, eq, at, Operand(zero_reg));
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ lw(temp, FieldMemOperand(input, HeapObject::kMapOffset));
  __ lbu(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ And(at, temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, ne, at, Operand(zero_reg));
}


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);

  EmitBranch(true_block, false_block, condition, v0, Operand(zero_reg));
}


static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ JumpIfSmi(input, false_label);

  __ GetObjectType(input, scratch, scratch);
  EmitBranch(true_block,
             false_block,
             BranchCondition(instr->hydrogen()),
             scratch,
             Operand(TestType(instr->hydrogen())));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ lw(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ lw(scratch, FieldMemOperand(input, String::kHashFieldOffset));
  __ And(at, scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq, at, Operand(zero_reg));
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!input.is(temp2));
  ASSERT(!temp.is(temp2));

  __ JumpIfSmi(input, is_false);

  if (class_name->IsEqualTo(CStrVector("Function"))) {
    // Callable spec objects have class "Function".
    __ GetObjectType(input, temp, temp2);
    __ Branch(is_false, lt, temp2, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));
  } else {
    // Anything in the callable range cannot have another class name.
    __ GetObjectType(input, temp, temp2);
    __ Branch(is_false, hs, temp2, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));
  }

  // Check if the constructor in the map is a function; objects with a
  // non-function constructor have class "Object".
  __ lw(temp, FieldMemOperand(temp, Map::kConstructorOffset));
  __ GetObjectType(temp, temp2, temp2);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ Branch(is_true, ne, temp2, Operand(JS_FUNCTION_TYPE));
  } else {
    __ Branch(is_false, ne, temp2, Operand(JS_FUNCTION_TYPE));
  }

  // temp now contains the constructor function. Grab the instance class
  // name from its shared function info; the caller compares it against
  // class_name.
  __ lw(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ lw(temp, FieldMemOperand(temp,
                              SharedFunctionInfo::kInstanceClassNameOffset));
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq, temp, Operand(class_name));
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  EmitBranch(true_block, false_block, eq, temp, Operand(instr->map()));
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  Label true_label, done;
  Register result = ToRegister(instr->result());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ Branch(&true_label, eq, result, Operand(zero_reg));
  __ li(result, Operand(factory()->false_value()));
  __ Branch(&done);
  __ bind(&true_label);
  __ li(result, Operand(factory()->true_value()));
  __ bind(&done);
}
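

// DoInstanceOfKnownGlobal inlines an instanceof cache: the hole values
// loaded below are patched by the InstanceofStub with the last map/result
// pair, so a repeat check with the same map short-circuits. Misses fall
// through to the deferred code, which calls the stub.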
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    }
    virtual LInstruction* instr() { return instr_; }
    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  // A Smi is not instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation, so the value can later be patched with
  // the cached map.
  Handle<JSGlobalPropertyCell> cell =
      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
  __ li(at, Operand(Handle<Object>(cell)));
  __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
  __ Branch(&cache_miss, ne, map, Operand(at));
  // Patched with true or false.
  __ li(result, Operand(factory()->the_hole_value()));
  __ Branch(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not instance of anything.
  __ LoadRoot(temp, Heap::kNullValueRootIndex);
  __ Branch(&false_result, eq, object, Operand(temp));

  // String values are not instances of anything.
  Condition cc = __ IsObjectStringType(object, temp, temp);
  __ Branch(&false_result, cc, temp, Operand(zero_reg));

  // Go to the deferred code.
  __ Branch(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. The deferred code also produces
  // a true or false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                               Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(v0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. Its safepoint slot is
  // used to communicate the offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  __ LoadHeapObject(InstanceofStub::right(), instr->function());
  static const int kAdditionalDelta = 7;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ li(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  // Put the result value into the result register slot and restore all
  // registers.
  __ StoreToSafepointRegisterSlot(result, result);
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);
  // A minor optimization that relies on LoadRoot always emitting one
  // instruction.
  Label done;
  __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg));
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  ASSERT_EQ(3, masm()->InstructionsGeneratedSince(&done));
  __ bind(&done);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in v0.
    __ push(v0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ Pop(ra, fp);
  __ Addu(sp, sp, Operand(sp_delta));
  __ Jump(ra);
}


void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ lw(result, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment(), result, Operand(at));
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  __ li(a2, Operand(instr->name()));
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Register cell = scratch0();

  // Load the cell.
  __ li(cell, Operand(instr->hydrogen()->cell()));

  // If the cell we are storing to contains the hole it could have been
  // deleted from the property dictionary. In that case, we need to update
  // the property details in the property dictionary to mark it as no longer
  // deleted.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    // We use a temp to check the payload.
    Register payload = ToRegister(instr->TempAt(0));
    __ lw(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment(), payload, Operand(at));
  }

  // Store the value. Cells are always rescanned, so no write barrier here.
  __ sw(value, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  __ li(a2, Operand(instr->name()));
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());

  __ lw(result, ContextOperand(context, instr->slot_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(at));
    } else {
      Label is_not_hole;
      __ Branch(&is_not_hole, ne, result, Operand(at));
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);
    }
  }
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  MemOperand target = ContextOperand(context, instr->slot_index());

  Label skip_assignment;

  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ lw(scratch, target);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr->environment(), scratch, Operand(at));
    } else {
      __ Branch(&skip_assignment, ne, scratch, Operand(at));
    }
  }

  __ sw(value, target);
  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    __ RecordWriteContextSlot(context,
                              target.offset(),
                              value,
                              scratch,
                              kRAHasBeenSaved,
                              kSaveFPRegs,
                              EMIT_REMEMBERED_SET,
                              check_needed);
  }

  __ bind(&skip_assignment);
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ lw(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ lw(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name,
                                               LEnvironment* env) {
  LookupResult lookup(isolate());
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsFound() || lookup.IsCacheable());
  if (lookup.IsFound() && lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ lw(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    __ LoadHeapObject(result, function);
  } else {
    // Negative lookup: check that the prototype chain does not define the
    // name, and deoptimize if any map in the chain changes.
    HeapObject* current = HeapObject::cast((*type)->prototype());
    Heap* heap = type->GetHeap();
    while (current != heap->null_value()) {
      Handle<HeapObject> link(current);
      __ LoadHeapObject(result, link);
      __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset));
      DeoptimizeIf(ne, env,
                   result, Operand(Handle<Map>(link->map())));
      current = HeapObject::cast(link->map()->prototype());
    }
    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  }
}


void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register object_map = scratch0();

  int map_count = instr->hydrogen()->types()->length();
  bool need_generic = instr->hydrogen()->need_generic();

  if (map_count == 0 && !need_generic) {
    DeoptimizeIf(al, instr->environment());
    return;
  }
  Handle<String> name = instr->hydrogen()->name();
  Label done;
  __ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
  for (int i = 0; i < map_count; ++i) {
    bool last = (i == map_count - 1);
    Handle<Map> map = instr->hydrogen()->types()->at(i);
    Label check_passed;
    __ CompareMapAndBranch(
        object_map, map, &check_passed, eq, &check_passed);
    if (last && !need_generic) {
      DeoptimizeIf(al, instr->environment());
      __ bind(&check_passed);
      EmitLoadFieldOrConstantFunction(
          result, object, map, name, instr->environment());
    } else {
      Label next;
      __ Branch(&next);
      __ bind(&check_passed);
      EmitLoadFieldOrConstantFunction(
          result, object, map, name, instr->environment());
      __ Branch(&done);
      __ bind(&next);
    }
  }
  if (need_generic) {
    __ li(a2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  }
  __ bind(&done);
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  __ li(a2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the result
  // register.
  __ GetObjectType(function, result, scratch);
  DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_FUNCTION_TYPE));

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ Branch(&non_instance, ne, scratch, Operand(zero_reg));

  // Get the prototype or initial map from the function.
  __ lw(result,
        FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(eq, instr->environment(), result, Operand(at));

  // If the function does not have an initial map, we're done.
  Label done;
  __ GetObjectType(result, scratch, scratch);
  __ Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ Branch(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  __ bind(&non_instance);
  __ lw(result, FieldMemOperand(result, Map::kConstructorOffset));

  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ lw(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, fail;
    __ lw(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(&done, eq, scratch, Operand(at));
    __ LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
    __ Branch(&done, eq, scratch, Operand(at));
    // |scratch| still contains |input|'s map.
    __ lbu(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
    __ Ubfx(scratch, scratch, Map::kElementsKindShift,
            Map::kElementsKindBitCount);
    __ Branch(&fail, lt, scratch,
              Operand(GetInitialFastElementsKind()));
    __ Branch(&done, le, scratch,
              Operand(TERMINAL_FAST_ELEMENTS_KIND));
    __ Branch(&fail, lt, scratch,
              Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ Branch(&done, le, scratch,
              Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ lw(to_reg, FieldMemOperand(from_reg,
                                ExternalArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bailout index is not a valid argument index. Use unsigned check to get
  // the negative check for free.
  DeoptimizeIf(ls, instr->environment(), length, Operand(index));

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them, add one more.
  __ subu(length, length, index);
  __ Addu(length, length, Operand(1));
  __ sll(length, length, kPointerSizeLog2);
  __ Addu(at, arguments, Operand(length));
  __ lw(result, MemOperand(at, 0));
}
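

// Keyed element loads. The fast paths compute the element address from
// the key (constant or register) plus any additional index, then deopt if
// a hole check is required and fails.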
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result.
  __ sll(scratch, key, kPointerSizeLog2);  // Key indexes words.
  __ addu(scratch, elements, scratch);
  uint32_t offset = FixedArray::kHeaderSize +
                    (instr->additional_index() << kPointerSizeLog2);
  __ lw(result, FieldMemOperand(scratch, offset));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
      __ And(scratch, result, Operand(kSmiTagMask));
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
    } else {
      __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
      DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
    }
  }
}


void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  Register elements = ToRegister(instr->elements());
  bool key_is_constant = instr->key()->IsConstantOperand();
  Register key = no_reg;
  DoubleRegister result = ToDoubleRegister(instr->result());
  Register scratch = scratch0();

  int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }

  if (key_is_constant) {
    __ Addu(elements, elements,
        Operand(((constant_key + instr->additional_index()) << shift_size) +
                FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  } else {
    __ sll(scratch, key, shift_size);
    __ Addu(elements, elements, Operand(scratch));
    __ Addu(elements, elements,
        Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
                (instr->additional_index() << shift_size)));
  }

  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
    DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
  }

  __ ldc1(result, MemOperand(elements));
}


void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(elements_kind);
  int additional_offset = instr->additional_index() << shift_size;

  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    FPURegister result = ToDoubleRegister(instr->result());
    if (key_is_constant) {
      __ Addu(scratch0(), external_pointer, constant_key << shift_size);
    } else {
      __ sll(scratch0(), key, shift_size);
      __ Addu(scratch0(), scratch0(), external_pointer);
    }

    if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      __ lwc1(result, MemOperand(scratch0(), additional_offset));
      __ cvt_d_s(result, result);
    } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
      __ ldc1(result, MemOperand(scratch0(), additional_offset));
    }
  } else {
    Register result = ToRegister(instr->result());
    Register scratch = scratch0();
    if (instr->additional_index() != 0 && !key_is_constant) {
      __ Addu(scratch, key, instr->additional_index());
    }
    MemOperand mem_operand(zero_reg);
    if (key_is_constant) {
      mem_operand =
          MemOperand(external_pointer,
                     (constant_key << shift_size) + additional_offset);
    } else {
      if (instr->additional_index() == 0) {
        __ sll(scratch, key, shift_size);
      } else {
        __ sll(scratch, scratch, shift_size);
      }
      __ Addu(scratch, scratch, external_pointer);
      mem_operand = MemOperand(scratch);
    }
    switch (elements_kind) {
      case EXTERNAL_BYTE_ELEMENTS:
        __ lb(result, mem_operand);
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ lbu(result, mem_operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
        __ lh(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ lhu(result, mem_operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
        __ lw(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ lw(result, mem_operand);
        DeoptimizeIf(Ugreater_equal, instr->environment(),
                     result, Operand(0x80000000));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register temp = scratch1();
  Register result = ToRegister(instr->result());

  if (instr->hydrogen()->from_inlined()) {
    __ Subu(result, sp, 2 * kPointerSize);
  } else {
    // Check if the calling frame is an arguments adaptor frame.
    Label done, adapted;
    __ lw(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ lw(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
    __ Xor(temp, result,
           Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

    // Result is the frame pointer for the frame if not adapted and for the
    // real frame below the adaptor frame if adapted.
    __ Movn(result, fp, temp);       // Move only if temp is not zero (ne).
    __ Movz(result, scratch, temp);  // Move only if temp is zero (eq).
  }
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame, the number of arguments is fixed.
  __ Addu(result, zero_reg, Operand(scope()->num_parameters()));
  __ Branch(&done, eq, fp, Operand(elem));

  // Arguments adaptor frame present. Get the argument length from there.
  __ lw(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(result,
        MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  __ bind(&done);
}


void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register scratch = scratch0();

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode functions.
  __ lw(scratch,
        FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ lw(scratch,
        FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
  int32_t strict_mode_function_mask =
      1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
  int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
  __ And(scratch, scratch, Operand(strict_mode_function_mask | native_mask));
  __ Branch(&receiver_ok, ne, scratch, Operand(zero_reg));

  // Normal function. Replace undefined or null with the global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ Branch(&global_object, eq, receiver, Operand(scratch));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Branch(&global_object, eq, receiver, Operand(scratch));

  // Deoptimize if the receiver is not a JS object.
  __ And(scratch, receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg));

  __ GetObjectType(receiver, scratch, scratch);
  DeoptimizeIf(lt, instr->environment(),
               scratch, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ Branch(&receiver_ok);

  __ bind(&global_object);
  __ lw(receiver, GlobalObjectOperand());
  __ lw(receiver,
        FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  DeoptimizeIf(hi, instr->environment(), length, Operand(kArgumentsLimit));

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ Move(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ Addu(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg));
  __ sll(scratch, length, 2);
  __ bind(&loop);
  __ Addu(scratch, elements, scratch);
  __ lw(scratch, MemOperand(scratch));
  __ push(scratch);
  __ Subu(length, length, Operand(1));
  __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
  __ sll(scratch, length, 2);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  // The number of arguments is stored in receiver, as expected
  // by InvokeFunction.
  ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
}


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, at);
    __ push(argument_reg);
  }
}


void LCodeGen::DoDrop(LDrop* instr) {
  __ Drop(instr->count());
}


void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ LoadHeapObject(result, instr->hydrogen()->closure());
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}


void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ lw(result,
        MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}


void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
  __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
  // The context is the first argument.
  __ Push(cp, scratch0(), scratch1());
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ lw(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ lw(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
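

// CallKnownFunction invokes a function known at compile time. If no
// arguments adaption is needed the call goes directly through the code
// entry; otherwise it falls back to InvokeFunction, which sets up the
// arguments adaptor frame.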
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind,
                                 A1State a1_state) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  if (can_invoke_directly) {
    if (a1_state == A1_UNINITIALIZED) {
      __ LoadHeapObject(a1, function);
    }

    // Change context if needed.
    bool change_context =
        (info()->closure()->context() != function->context()) ||
        scope()->contains_with() ||
        (scope()->num_heap_slots() > 0);
    if (change_context) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    }

    // Set a0 to the number of arguments if adaption is not needed.
    if (!function->NeedsArgumentsAdaption()) {
      __ li(a0, Operand(arity));
    }

    // Invoke function.
    __ SetCallKind(t1, call_kind);
    __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Call(at);

    // Set up deoptimization.
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
  } else {
    SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
  }
}


void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD,
                    A1_UNINITIALIZED);
}
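

// Deferred Math.abs for tagged values: deoptimize unless the input is a
// heap number, return positive inputs unchanged, and otherwise allocate a
// fresh heap number whose sign bit is cleared.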
2950 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2951 Register input =
ToRegister(instr->InputAt(0));
2952 Register result =
ToRegister(instr->result());
2953 Register scratch = scratch0();
2957 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
2958 DeoptimizeIf(
ne, instr->environment(), scratch, Operand(at));
2961 Register exponent = scratch0();
2966 __ Move(result, input);
2968 __ Branch(&done,
eq, at, Operand(zero_reg));
2973 PushSafepointRegistersScope scope(
this, Safepoint::kWithRegisters);
2977 Register tmp1 = input.is(a1) ? a0 : a1;
2978 Register tmp2 = input.is(a2) ? a0 : a2;
2979 Register tmp3 = input.is(a3) ? a0 : a3;
2980 Register tmp4 = input.is(t0) ? a0 : t0;
2984 Label allocated, slow;
2985 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
2986 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
2987 __ Branch(&allocated);
2992 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2997 __ LoadFromSafepointRegisterSlot(input, input);
3000 __ bind(&allocated);
3008 __ StoreToSafepointRegisterSlot(tmp1, result);
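// Integer Math.abs: negate negative inputs and deoptimize when the result
// is still negative, which only happens for kMinInt.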
3015 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
3016 Register input = ToRegister(instr->InputAt(0));
3017 Register result = ToRegister(instr->result());
3021 __ mov(result, input);
3022 ASSERT_EQ(2, masm()->InstructionsGeneratedSince(&done));
3023 __ subu(result, zero_reg, input);
3025 DeoptimizeIf(lt, instr->environment(), result, Operand(zero_reg));
3030 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
3032 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
3034 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
3035 LUnaryMathOperation* instr)
3036 : LDeferredCode(codegen), instr_(instr) { }
3037 virtual void Generate() {
3038 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3040 virtual LInstruction* instr() { return instr_; }
3042 LUnaryMathOperation* instr_;
3045 Representation r = instr->hydrogen()->value()->representation();
3047 FPURegister input = ToDoubleRegister(instr->InputAt(0));
3048 FPURegister result = ToDoubleRegister(instr->result());
3049 __ abs_d(result, input);
3050 } else if (r.IsInteger32()) {
3051 EmitIntegerMathAbs(instr);
3054 DeferredMathAbsTaggedHeapNumber* deferred =
3055 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
3056 Register input = ToRegister(instr->InputAt(0));
3058 __ JumpIfNotSmi(input, deferred->entry());
3060 EmitIntegerMathAbs(instr);
3061 __ bind(deferred->exit());
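// Floors a double into an integer register, deoptimizing when the FPU
// conversion is inexact or out of range, and when the result is zero but
// the input was -0 (checked via the sign bit in the high word).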
3066 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
3068 Register result = ToRegister(instr->result());
3069 FPURegister single_scratch = double_scratch0().low();
3070 Register scratch1 = scratch0();
3071 Register except_flag = ToRegister(instr->TempAt(0));
3080 DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));
3083 __ mfc1(result, single_scratch);
3088 __ Branch(&done, ne, result, Operand(zero_reg));
3089 __ mfc1(scratch1, input.high());
3091 DeoptimizeIf(ne, instr->environment(), scratch1, Operand(zero_reg));
3097 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3099 Register result = ToRegister(instr->result());
3100 Register scratch = scratch0();
3101 Label done, check_sign_on_zero;
3104 __ mfc1(result, input.high());
3113 __ mov(result, zero_reg);
3115 __ Branch(&check_sign_on_zero);
3123 DeoptimizeIf(ge, instr->environment(), scratch,
3129 __ Move(double_scratch0(), 0.5);
3130 __ add_d(double_scratch0(), input, double_scratch0());
3134 __ mfc1(result, double_scratch0().high());
3135 __ Xor(result, result, Operand(scratch));
3138 DeoptimizeIf(lt, instr->environment(), result,
3144 __ Branch(&skip2, ge, result, Operand(zero_reg));
3145 __ mov(result, zero_reg);
3150 Register except_flag = scratch;
3153 double_scratch0().low(),
3158 DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));
3160 __ mfc1(result, double_scratch0().low());
3164 __ Branch(&done, ne, result, Operand(zero_reg));
3165 __ bind(&check_sign_on_zero);
3166 __ mfc1(scratch, input.high());
3168 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
3174 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3177 __ sqrt_d(result, input);
3181 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3186 ASSERT(!input.is(result));
3196 __ neg_d(result, temp);
3199 __ add_d(result, input, kDoubleRegZero);
3200 __ sqrt_d(result, result);
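// Fixed register calling convention for the power stub, documented by the
// ASSERTs below: base in f2, double exponent in f4 (tagged or integer
// exponent in a2), result in f0.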
3205 void LCodeGen::DoPower(LPower* instr) {
3206 Representation exponent_type = instr->hydrogen()->right()->representation();
3209 ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
3210 ToDoubleRegister(instr->InputAt(1)).is(f4));
3211 ASSERT(!instr->InputAt(1)->IsRegister() ||
3213 ASSERT(ToDoubleRegister(instr->InputAt(0)).is(f2));
3214 ASSERT(ToDoubleRegister(instr->result()).is(f0));
3216 if (exponent_type.IsTagged()) {
3218 __ JumpIfSmi(a2, &no_deopt);
3220 DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
3224 } else if (exponent_type.IsInteger32()) {
3228 ASSERT(exponent_type.IsDouble());
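// Inline random number generation: two 16-bit multiply-with-carry steps
// (multipliers 18273 and 36969) over seeds kept alongside the global
// context, combined and converted to a double in [0, 1) using the
// 0x41300000 exponent trick; an uninitialized seed branches to the
// deferred C call.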
3235 void LCodeGen::DoRandom(LRandom* instr) {
3236 class DeferredDoRandom: public LDeferredCode {
3238 DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
3239 : LDeferredCode(codegen), instr_(instr) { }
3240 virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
3241 virtual LInstruction* instr() { return instr_; }
3246 DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
3249 ASSERT(ToDoubleRegister(instr->result()).is(f0));
3252 static const int kSeedSize = sizeof(uint32_t);
3256 static const int kRandomSeedOffset =
3263 __ Branch(deferred->entry(), eq, a1, Operand(zero_reg));
3270 __ And(a3, a1, Operand(0xFFFF));
3271 __ li(t0, Operand(18273));
3274 __ Addu(a1, a3, a1);
3279 __ And(a3, a0, Operand(0xFFFF));
3280 __ li(t0, Operand(36969));
3283 __ Addu(a0, a3, a0);
3288 __ And(a0, a0, Operand(0x3FFFF));
3290 __ Addu(v0, a0, a1);
3292 __ bind(deferred->exit());
3295 __ li(a2, Operand(0x41300000));
3297 __ Move(f12, v0, a2);
3299 __ Move(f14, zero_reg, a2);
3304 void LCodeGen::DoDeferredRandom(LRandom* instr) {
3305 __ PrepareCallCFunction(1, scratch0());
3306 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3311 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3312 ASSERT(ToDoubleRegister(instr->result()).is(f4));
3315 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3319 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
3320 ASSERT(ToDoubleRegister(instr->result()).is(f4));
3323 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3327 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3328 ASSERT(ToDoubleRegister(instr->result()).is(f4));
3331 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3335 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3336 ASSERT(ToDoubleRegister(instr->result()).is(f4));
3339 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3343 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3344 switch (instr->op()) {
3358 DoMathPowHalf(instr);
3373 Abort("Unimplemented type of LUnaryMathOperation.");
3379 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3381 ASSERT(instr->HasPointerMap());
3383 if (instr->known_function().is_null()) {
3384 LPointerMap* pointers = instr->pointer_map();
3385 RecordPosition(pointers->position());
3386 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3387 ParameterCount count(instr->arity());
3391 CallKnownFunction(instr->known_function(),
3395 A1_CONTAINS_TARGET);
3400 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3403 int arity = instr->arity();
3405 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
3406 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3411 void LCodeGen::DoCallNamed(LCallNamed* instr) {
3414 int arity = instr->arity();
3415 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3417 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3418 __ li(a2, Operand(instr->name()));
3419 CallCode(ic, mode, instr);
3425 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3429 int arity = instr->arity();
3431 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3436 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3439 int arity = instr->arity();
3440 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
3442 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3443 __ li(a2, Operand(instr->name()));
3444 CallCode(ic, mode, instr);
3449 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3451 CallKnownFunction(instr->target(),
3459 void LCodeGen::DoCallNew(LCallNew* instr) {
3464 __ li(a0, Operand(instr->arity()));
3465 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
3469 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3470 CallRuntime(instr->function(), instr->arity(), instr);
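// Stores to a named field, optionally installing a transition map first;
// write barriers are emitted for the map and the value whenever the
// hydrogen instruction requires them.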
3474 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3475 Register object = ToRegister(instr->object());
3477 Register scratch = scratch0();
3478 int offset = instr->offset();
3480 ASSERT(!object.is(value));
3482 if (!instr->transition().is_null()) {
3483 __ li(scratch, Operand(instr->transition()));
3485 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
3486 Register temp = ToRegister(instr->TempAt(0));
3488 __ RecordWriteField(object,
3500 HType type = instr->hydrogen()->value()->type();
3503 if (instr->is_in_object()) {
3505 if (instr->hydrogen()->NeedsWriteBarrier()) {
3507 __ RecordWriteField(object,
3519 if (instr->hydrogen()->NeedsWriteBarrier()) {
3522 __ RecordWriteField(scratch,
3535 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3540 __ li(a2, Operand(instr->name()));
3541 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3542 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3543 : isolate()->builtins()->StoreIC_Initialize();
3544 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3548 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3550 instr->environment(),
3556 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3558 Register elements = ToRegister(instr->object());
3559 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3560 Register scratch = scratch0();
3563 if (instr->key()->IsConstantOperand()) {
3564 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3567 (ToInteger32(const_operand) + instr->additional_index()) * kPointerSize
3572 __ addu(scratch, elements, scratch);
3573 if (instr->additional_index() != 0) {
3581 if (instr->hydrogen()->NeedsWriteBarrier()) {
3582 HType type = instr->hydrogen()->value()->type();
3587 __ RecordWrite(elements,
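// Stores a double into a fast-double-elements backing store. When
// canonicalization is required, NaNs are detected with a self-comparison
// and replaced by the canonical NaN before the store.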
3598 void LCodeGen::DoStoreKeyedFastDoubleElement(
3599 LStoreKeyedFastDoubleElement* instr) {
3601 Register elements = ToRegister(instr->elements());
3603 Register scratch = scratch0();
3604 bool key_is_constant = instr->key()->IsConstantOperand();
3605 int constant_key = 0;
3610 if (key_is_constant) {
3612 if (constant_key & 0xF0000000) {
3613 Abort("array index constant value too big.");
3619 if (key_is_constant) {
3620 __ Addu(scratch, elements, Operand((constant_key << shift_size) +
3623 __ sll(scratch, key, shift_size);
3624 __ Addu(scratch, elements, Operand(scratch));
3625 __ Addu(scratch, scratch,
3629 if (instr->NeedsCanonicalization()) {
3632 __ BranchF(NULL, &is_nan, eq, value, value);
3633 __ Branch(&not_nan);
3641 __ sdc1(value, MemOperand(scratch, instr->additional_index() << shift_size));
3645 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3646 LStoreKeyedSpecializedArrayElement* instr) {
3648 Register external_pointer = ToRegister(instr->external_pointer());
3651 bool key_is_constant = instr->key()->IsConstantOperand();
3652 int constant_key = 0;
3653 if (key_is_constant) {
3655 if (constant_key & 0xF0000000) {
3656 Abort("array index constant value too big.");
3662 int additional_offset = instr->additional_index() << shift_size;
3666 FPURegister value(ToDoubleRegister(instr->value()));
3667 if (key_is_constant) {
3668 __ Addu(scratch0(), external_pointer, constant_key << shift_size);
3670 __ sll(scratch0(), key, shift_size);
3671 __ Addu(scratch0(), scratch0(), external_pointer);
3675 __ cvt_s_d(double_scratch0(), value);
3676 __ swc1(double_scratch0(), MemOperand(scratch0(), additional_offset));
3678 __ sdc1(value, MemOperand(scratch0(), additional_offset));
3682 Register scratch = scratch0();
3683 if (instr->additional_index() != 0 && !key_is_constant) {
3684 __ Addu(scratch, key, instr->additional_index());
3687 if (key_is_constant) {
3689 ((constant_key + instr->additional_index())
3692 if (instr->additional_index() == 0) {
3693 __ sll(scratch, key, shift_size);
3695 __ sll(scratch, scratch, shift_size);
3697 __ Addu(scratch, scratch, external_pointer);
3700 switch (elements_kind) {
3704 __ sb(value, mem_operand);
3708 __ sh(value, mem_operand);
3712 __ sw(value, mem_operand);
3730 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3735 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3736 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3737 : isolate()->builtins()->KeyedStoreIC_Initialize();
3738 CallCode(ic, RelocInfo::CODE_TARGET, instr);
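// Changes an object's elements kind. A compatible transition is just a map
// store (plus write barrier); smi-to-double and double-to-object
// transitions call builtin stubs that expect the object in a2 and the new
// map in a3, as the ASSERTs below document.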
3742 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3743 Register object_reg = ToRegister(instr->object());
3744 Register new_map_reg = ToRegister(instr->new_map_reg());
3745 Register scratch = scratch0();
3747 Handle<Map> from_map = instr->original_map();
3748 Handle<Map> to_map = instr->transitioned_map();
3754 Label not_applicable;
3756 __ Branch(&not_applicable, ne, scratch, Operand(from_map));
3758 __ li(new_map_reg, Operand(to_map));
3766 Register fixed_object_reg = ToRegister(instr->temp_reg());
3767 ASSERT(fixed_object_reg.is(a2));
3768 ASSERT(new_map_reg.is(a3));
3769 __ mov(fixed_object_reg, object_reg);
3770 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3771 RelocInfo::CODE_TARGET, instr);
3774 Register fixed_object_reg = ToRegister(instr->temp_reg());
3775 ASSERT(fixed_object_reg.is(a2));
3776 ASSERT(new_map_reg.is(a3));
3777 __ mov(fixed_object_reg, object_reg);
3778 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3779 RelocInfo::CODE_TARGET, instr);
3783 __ bind(&not_applicable);
3787 void LCodeGen::DoStringAdd(LStringAdd* instr) {
3791 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3795 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3796 class DeferredStringCharCodeAt: public LDeferredCode {
3798 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3799 : LDeferredCode(codegen), instr_(instr) { }
3800 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3801 virtual LInstruction* instr() { return instr_; }
3803 LStringCharCodeAt* instr_;
3806 DeferredStringCharCodeAt* deferred =
3807 new(zone()) DeferredStringCharCodeAt(this, instr);
3813 __ bind(deferred->exit());
3817 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3818 Register string = ToRegister(instr->string());
3819 Register result = ToRegister(instr->result());
3820 Register scratch = scratch0();
3825 __ mov(result, zero_reg);
3827 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3831 if (instr->index()->IsConstantOperand()) {
3840 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3841 if (FLAG_debug_code) {
3842 __ AbortIfNotSmi(v0);
3845 __ StoreToSafepointRegisterSlot(v0, result);
3849 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3850 class DeferredStringCharFromCode: public LDeferredCode {
3852 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3853 : LDeferredCode(codegen), instr_(instr) { }
3854 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3855 virtual LInstruction* instr() { return instr_; }
3857 LStringCharFromCode* instr_;
3860 DeferredStringCharFromCode* deferred =
3861 new(zone()) DeferredStringCharFromCode(this, instr);
3863 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3864 Register char_code = ToRegister(instr->char_code());
3865 Register result = ToRegister(instr->result());
3866 Register scratch = scratch0();
3867 ASSERT(!char_code.is(result));
3869 __ Branch(deferred->entry(), hi,
3871 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3873 __ Addu(result, result, scratch);
3875 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3876 __ Branch(deferred->entry(), eq, result, Operand(scratch));
3877 __ bind(deferred->exit());
3881 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3882 Register char_code = ToRegister(instr->char_code());
3883 Register result = ToRegister(instr->result());
3888 __ mov(result, zero_reg);
3890 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3891 __ SmiTag(char_code);
3893 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
3894 __ StoreToSafepointRegisterSlot(v0, result);
3898 void LCodeGen::DoStringLength(LStringLength* instr) {
3899 Register string = ToRegister(instr->InputAt(0));
3900 Register result = ToRegister(instr->result());
3905 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3906 LOperand* input = instr->InputAt(0);
3907 ASSERT(input->IsRegister() || input->IsStackSlot());
3908 LOperand* output = instr->result();
3909 ASSERT(output->IsDoubleRegister());
3910 FPURegister single_scratch = double_scratch0().low();
3911 if (input->IsStackSlot()) {
3912 Register scratch = scratch0();
3913 __ lw(scratch, ToMemOperand(input));
3914 __ mtc1(scratch, single_scratch);
3918 __ cvt_d_w(ToDoubleRegister(output), single_scratch);
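// Tags an int32 as a smi; if tagging overflows, the deferred path boxes
// the value in a newly allocated heap number instead.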
3922 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3923 class DeferredNumberTagI: public LDeferredCode {
3925 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3926 : LDeferredCode(codegen), instr_(instr) { }
3927 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3928 virtual LInstruction* instr() { return instr_; }
3930 LNumberTagI* instr_;
3933 Register src = ToRegister(instr->InputAt(0));
3935 Register overflow = scratch0();
3937 DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
3938 __ SmiTagCheckOverflow(dst, src, overflow);
3939 __ BranchOnOverflow(deferred->entry(), overflow);
3940 __ bind(deferred->exit());
3944 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3946 Register src = ToRegister(instr->InputAt(0));
3948 FPURegister dbl_scratch = double_scratch0();
3951 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3958 __ SmiUntag(src, dst);
3959 __ Xor(src, src, Operand(0x80000000));
3961 __ mtc1(src, dbl_scratch);
3962 __ cvt_d_w(dbl_scratch, dbl_scratch);
3963 if (FLAG_inline_new) {
3964 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
3965 __ AllocateHeapNumber(t1, a3, t0, t2, &slow);
3976 __ StoreToSafepointRegisterSlot(zero_reg, dst);
3977 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3984 __ StoreToSafepointRegisterSlot(dst, dst);
3988 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3989 class DeferredNumberTagD: public LDeferredCode {
3991 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3992 : LDeferredCode(codegen), instr_(instr) { }
3993 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3994 virtual LInstruction* instr() { return instr_; }
3996 LNumberTagD* instr_;
4000 Register scratch = scratch0();
4002 Register temp1 = ToRegister(instr->TempAt(0));
4003 Register temp2 = ToRegister(instr->TempAt(1));
4005 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
4006 if (FLAG_inline_new) {
4007 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
4008 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
4010 __ Branch(deferred->entry());
4012 __ bind(deferred->exit());
4017 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4022 __ mov(reg, zero_reg);
4024 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4025 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
4026 __ StoreToSafepointRegisterSlot(v0, reg);
4030 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4036 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4037 Register scratch = scratch0();
4038 Register input = ToRegister(instr->InputAt(0));
4039 Register result = ToRegister(instr->result());
4040 if (instr->needs_check()) {
4044 __ SmiUntag(result, input);
4045 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
4047 __ SmiUntag(result, input);
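// Converts a tagged value to a double: smis are untagged and converted,
// heap numbers are loaded directly, undefined optionally becomes NaN, and
// -0 can be made to deoptimize via the sign bit of the high word.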
4052 void LCodeGen::EmitNumberUntagD(Register input_reg,
4054 bool deoptimize_on_undefined,
4055 bool deoptimize_on_minus_zero,
4056 LEnvironment* env) {
4057 Register scratch = scratch0();
4059 Label load_smi, heap_number, done;
4062 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4066 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4067 if (deoptimize_on_undefined) {
4068 DeoptimizeIf(ne, env, scratch, Operand(at));
4071 __ Branch(&heap_number, eq, scratch, Operand(at));
4073 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4074 DeoptimizeIf(ne, env, input_reg, Operand(at));
4077 __ LoadRoot(at, Heap::kNanValueRootIndex);
4081 __ bind(&heap_number);
4085 if (deoptimize_on_minus_zero) {
4086 __ mfc1(at, result_reg.low());
4087 __ Branch(&done, ne, at, Operand(zero_reg));
4088 __ mfc1(scratch, result_reg.high());
4096 __ mtc1(scratch, result_reg);
4097 __ cvt_d_w(result_reg, result_reg);
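// Slow path of tagged-to-int32, reached when the input is not a smi.
// Truncating conversions use EmitECMATruncate (with undefined mapping to
// zero); otherwise any inexact result, or a -0 input, deoptimizes.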
4102 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
4103 Register input_reg = ToRegister(instr->InputAt(0));
4104 Register scratch1 = scratch0();
4105 Register scratch2 = ToRegister(instr->TempAt(0));
4107 FPURegister single_scratch = double_scratch.low();
4109 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
4110 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
4117 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4121 if (instr->truncating()) {
4122 Register scratch3 = ToRegister(instr->TempAt(1));
4123 DoubleRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
4124 ASSERT(!scratch3.is(input_reg) &&
4125 !scratch3.is(scratch1) &&
4126 !scratch3.is(scratch2));
4130 __ Branch(&heap_number, eq, scratch1, Operand(at));
4133 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4134 DeoptimizeIf(ne, instr->environment(), input_reg, Operand(at));
4136 __ mov(input_reg, zero_reg);
4139 __ bind(&heap_number);
4140 __ ldc1(double_scratch2,
4142 __ EmitECMATruncate(input_reg,
4150 DeoptimizeIf(ne, instr->environment(), scratch1, Operand(at));
4153 __ ldc1(double_scratch,
4156 Register except_flag = scratch2;
4165 DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));
4168 __ mfc1(input_reg, single_scratch);
4171 __ Branch(&done, ne, input_reg, Operand(zero_reg));
4173 __ mfc1(scratch1, double_scratch.high());
4175 DeoptimizeIf(ne, instr->environment(), scratch1, Operand(zero_reg));
4182 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4183 class DeferredTaggedToI: public LDeferredCode {
4185 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
4186 : LDeferredCode(codegen), instr_(instr) { }
4187 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
4188 virtual LInstruction* instr() { return instr_; }
4193 LOperand* input = instr->InputAt(0);
4194 ASSERT(input->IsRegister());
4195 ASSERT(input->Equals(instr->result()));
4199 DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
4202 __ JumpIfNotSmi(input_reg, deferred->entry());
4205 __ SmiUntag(input_reg);
4206 __ bind(deferred->exit());
4210 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4211 LOperand* input = instr->InputAt(0);
4212 ASSERT(input->IsRegister());
4213 LOperand* result = instr->result();
4214 ASSERT(result->IsDoubleRegister());
4219 EmitNumberUntagD(input_reg, result_reg,
4220 instr->hydrogen()->deoptimize_on_undefined(),
4221 instr->hydrogen()->deoptimize_on_minus_zero(),
4222 instr->environment());
4226 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4227 Register result_reg = ToRegister(instr->result());
4228 Register scratch1 = scratch0();
4229 Register scratch2 = ToRegister(instr->TempAt(0));
4230 DoubleRegister double_input = ToDoubleRegister(instr->InputAt(0));
4231 FPURegister single_scratch = double_scratch0().low();
4233 if (instr->truncating()) {
4234 Register scratch3 = ToRegister(instr->TempAt(1));
4235 __ EmitECMATruncate(result_reg,
4242 Register except_flag = scratch2;
4252 DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));
4255 __ mfc1(result_reg, single_scratch);
4260 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4261 LOperand* input = instr->InputAt(0);
4263 DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
4267 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4268 LOperand* input = instr->InputAt(0);
4270 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
4274 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4275 Register input = ToRegister(instr->InputAt(0));
4276 Register scratch = scratch0();
4278 __ GetObjectType(input, scratch, scratch);
4280 if (instr->hydrogen()->is_interval_check()) {
4283 instr->hydrogen()->GetCheckInterval(&first, &last);
4286 if (first == last) {
4287 DeoptimizeIf(ne, instr->environment(), scratch, Operand(first));
4289 DeoptimizeIf(lo, instr->environment(), scratch, Operand(first));
4292 DeoptimizeIf(hi, instr->environment(), scratch, Operand(last));
4298 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
4302 __ And(at, scratch, mask);
4303 DeoptimizeIf(tag == 0 ? ne : eq, instr->environment(),
4304 at, Operand(zero_reg));
4306 __ And(scratch, scratch, Operand(mask));
4307 DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag));
4313 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4315 Handle<JSFunction> target = instr->hydrogen()->target();
4316 if (isolate()->heap()->InNewSpace(*target)) {
4318 Handle<JSGlobalPropertyCell> cell =
4319 isolate()->factory()->NewJSGlobalPropertyCell(target);
4320 __ li(at, Operand(Handle<Object>(cell)));
4322 DeoptimizeIf(ne, instr->environment(), reg,
4325 DeoptimizeIf(ne, instr->environment(), reg,
4331 void LCodeGen::DoCheckMapCommon(Register reg,
4335 LEnvironment* env) {
4337 __ CompareMapAndBranch(reg, scratch, map, &success, eq, &success, mode);
4338 DeoptimizeIf(al, env);
4343 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4344 Register scratch = scratch0();
4345 LOperand* input = instr->InputAt(0);
4346 ASSERT(input->IsRegister());
4349 SmallMapList* map_set = instr->hydrogen()->map_set();
4350 for (int i = 0; i < map_set->length() - 1; i++) {
4351 Handle<Map> map = map_set->at(i);
4352 __ CompareMapAndBranch(
4355 Handle<Map> map = map_set->last();
4361 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4363 Register result_reg = ToRegister(instr->result());
4365 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
4369 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4370 Register unclamped_reg = ToRegister(instr->unclamped());
4371 Register result_reg = ToRegister(instr->result());
4372 __ ClampUint8(result_reg, unclamped_reg);
4376 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4377 Register scratch = scratch0();
4378 Register input_reg = ToRegister(instr->unclamped());
4379 Register result_reg = ToRegister(instr->result());
4381 Label is_smi, done, heap_number;
4384 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);
4388 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map()));
4392 DeoptimizeIf(ne, instr->environment(), input_reg,
4393 Operand(factory()->undefined_value()));
4394 __ mov(result_reg, zero_reg);
4398 __ bind(&heap_number);
4401 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
4405 __ ClampUint8(result_reg, scratch);
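// Walks the prototype chain from instr->prototype() up to the holder,
// deoptimizing if any object on the way has an unexpected map.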
4411 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4412 Register temp1 = ToRegister(instr->TempAt(0));
4413 Register temp2 = ToRegister(instr->TempAt(1));
4415 Handle<JSObject> holder = instr->holder();
4416 Handle<JSObject> current_prototype = instr->prototype();
4419 __ LoadHeapObject(temp1, current_prototype);
4422 while (!current_prototype.is_identical_to(holder)) {
4423 DoCheckMapCommon(temp1, temp2,
4424 Handle<Map>(current_prototype->map()),
4427 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4429 __ LoadHeapObject(temp1, current_prototype);
4433 DoCheckMapCommon(temp1, temp2,
4434 Handle<Map>(current_prototype->map()),
4439 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4440 class DeferredAllocateObject: public LDeferredCode {
4442 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4443 : LDeferredCode(codegen), instr_(instr) { }
4444 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4445 virtual LInstruction* instr() { return instr_; }
4447 LAllocateObject* instr_;
4450 DeferredAllocateObject* deferred =
4451 new(zone()) DeferredAllocateObject(this, instr);
4453 Register result = ToRegister(instr->result());
4454 Register scratch = ToRegister(instr->TempAt(0));
4455 Register scratch2 = ToRegister(instr->TempAt(1));
4456 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4457 Handle<Map> initial_map(constructor->initial_map());
4458 int instance_size = initial_map->instance_size();
4459 ASSERT(initial_map->pre_allocated_property_fields() +
4460 initial_map->unused_property_fields() -
4461 initial_map->inobject_properties() == 0);
4466 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
4467 __ AllocateInNewSpace(instance_size,
4474 __ bind(deferred->exit());
4475 if (FLAG_debug_code) {
4476 Label is_in_new_space;
4477 __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
4478 __ Abort("Allocated object is not in new-space");
4479 __ bind(&is_in_new_space);
4483 Register map = scratch;
4484 __ LoadHeapObject(map, constructor);
4490 __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
4493 if (initial_map->inobject_properties() != 0) {
4494 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4495 for (int i = 0; i < initial_map->inobject_properties(); i++) {
4503 void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4504 Register result = ToRegister(instr->result());
4505 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4506 Handle<Map> initial_map(constructor->initial_map());
4507 int instance_size = initial_map->instance_size();
4512 __ mov(result, zero_reg);
4514 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4517 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
4518 __ StoreToSafepointRegisterSlot(v0, result);
4522 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4523 Heap* heap = isolate()->heap();
4525 instr->hydrogen()->boilerplate_elements_kind();
4531 boilerplate_elements_kind, true)) {
4532 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object());
4540 instr->environment(),
4542 Operand(boilerplate_elements_kind));
4546 __ li(a2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
4549 __ li(a1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
4550 __ Push(a3, a2, a1);
4553 int length = instr->hydrogen()->length();
4554 if (instr->hydrogen()->IsCopyOnWrite()) {
4555 ASSERT(instr->hydrogen()->depth() == 1);
4558 FastCloneShallowArrayStub stub(mode, length);
4559 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4560 } else if (instr->hydrogen()->depth() > 1) {
4561 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4563 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4569 FastCloneShallowArrayStub stub(mode, length);
4570 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
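// Recursively copies a boilerplate object and its elements into already
// allocated memory; *offset tracks the next free position within the
// allocation, and nested JSObjects are flattened into the same block.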
4575 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4583 Handle<FixedArrayBase> elements(object->elements());
4584 bool has_elements = elements->length() > 0 &&
4585 elements->map() != isolate()->heap()->fixed_cow_array_map();
4589 int object_offset = *offset;
4590 int object_size = object->map()->instance_size();
4591 int elements_offset = *offset + object_size;
4592 int elements_size = has_elements ? elements->Size() : 0;
4593 *offset += object_size + elements_size;
4596 ASSERT(object->properties()->length() == 0);
4597 int inobject_properties = object->map()->inobject_properties();
4598 int header_size = object_size - inobject_properties * kPointerSize;
4601 __ Addu(a2, result, Operand(elements_offset));
4609 for (int i = 0; i < inobject_properties; i++) {
4610 int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
4611 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
4612 if (value->IsJSObject()) {
4614 __ Addu(a2, result, Operand(*offset));
4616 __ LoadHeapObject(source, value_object);
4617 EmitDeepCopy(value_object, result, source, offset);
4618 } else if (value->IsHeapObject()) {
4619 __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
4622 __ li(a2, Operand(value));
4630 __ LoadHeapObject(source, elements);
4637 int elements_length = has_elements ? elements->length() : 0;
4638 if (elements->IsFixedDoubleArray()) {
4639 Handle<FixedDoubleArray> double_array =
4641 for (int i = 0; i < elements_length; i++) {
4642 int64_t value = double_array->get_representation(i);
4644 int32_t value_low = value & 0xFFFFFFFF;
4645 int32_t value_high = value >> 32;
4648 __ li(a2, Operand(value_low));
4650 __ li(a2, Operand(value_high));
4653 } else if (elements->IsFixedArray()) {
4655 for (int i = 0; i < elements_length; i++) {
4657 Handle<Object> value(fast_elements->get(i));
4658 if (value->IsJSObject()) {
4660 __ Addu(a2, result, Operand(*offset));
4662 __ LoadHeapObject(source, value_object);
4663 EmitDeepCopy(value_object, result, source, offset);
4664 } else if (value->IsHeapObject()) {
4665 __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
4668 __ li(a2, Operand(value));
4679 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4680 int size = instr->hydrogen()->total_size();
4682 instr->hydrogen()->boilerplate()->GetElementsKind();
4688 boilerplate_elements_kind, true)) {
4689 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
4696 DeoptimizeIf(ne, instr->environment(), a2,
4697 Operand(boilerplate_elements_kind));
4702 Label allocated, runtime_allocate;
4703 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
4706 __ bind(&runtime_allocate);
4709 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4711 __ bind(&allocated);
4713 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
4714 EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset);
4719 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4721 Handle<FixedArray> literals(instr->environment()->closure()->literals());
4722 Handle<FixedArray> constant_properties =
4723 instr->hydrogen()->constant_properties();
4726 __ LoadHeapObject(t0, literals);
4727 __ li(a3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
4728 __ li(a2, Operand(constant_properties));
4729 int flags = instr->hydrogen()->fast_elements()
4733 __ Push(t0, a3, a2, a1);
4736 int properties_count = constant_properties->length() / 2;
4737 if (instr->hydrogen()->depth() > 1) {
4738 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
4741 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
4743 FastCloneShallowObjectStub stub(properties_count);
4744 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4749 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4753 CallRuntime(Runtime::kToFastProperties, 1, instr);
4757 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4767 int literal_offset = FixedArray::kHeaderSize +
4770 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4771 __ Branch(&materialized, ne, a1, Operand(at));
4775 __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
4776 __ li(t1, Operand(instr->hydrogen()->pattern()));
4777 __ li(t0, Operand(instr->hydrogen()->flags()));
4778 __ Push(t3, t2, t1, t0);
4779 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
4782 __ bind(&materialized);
4784 Label allocated, runtime_allocate;
4786 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
4789 __ bind(&runtime_allocate);
4792 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4795 __ bind(&allocated);
4804 if ((size % (2 * kPointerSize)) != 0) {
4811 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4814 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
4815 bool pretenure = instr->hydrogen()->pretenure();
4816 if (!pretenure && shared_info->num_literals() == 0) {
4817 FastNewClosureStub stub(shared_info->language_mode());
4818 __ li(a1, Operand(shared_info));
4820 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4822 __ li(a2, Operand(shared_info));
4823 __ li(a1, Operand(pretenure
4824 ? factory()->true_value()
4825 : factory()->false_value()));
4826 __ Push(cp, a2, a1);
4827 CallRuntime(Runtime::kNewClosure, 3, instr);
4832 void LCodeGen::DoTypeof(LTypeof* instr) {
4834 Register input = ToRegister(instr->InputAt(0));
4836 CallRuntime(Runtime::kTypeof, 1, instr);
4840 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4841 Register input = ToRegister(instr->InputAt(0));
4842 int true_block = chunk_->LookupDestination(instr->true_block_id());
4843 int false_block = chunk_->LookupDestination(instr->false_block_id());
4844 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4845 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4848 Operand cmp2 = Operand(no_reg);
4850 Condition final_branch_condition = EmitTypeofIs(true_label,
4853 instr->type_literal(),
4858 ASSERT(!cmp2.is_reg() || cmp2.rm().is_valid());
4861 EmitBranch(true_block, false_block, final_branch_condition, cmp1, cmp2);
4866 Condition LCodeGen::EmitTypeofIs(Label* true_label,
4869 Handle<String> type_name,
4876 Register scratch = scratch0();
4877 if (type_name->Equals(heap()->number_symbol())) {
4878 __ JumpIfSmi(input, true_label);
4880 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4883 final_branch_condition = eq;
4885 } else if (type_name->Equals(heap()->string_symbol())) {
4886 __ JumpIfSmi(input, false_label);
4887 __ GetObjectType(input, input, scratch);
4895 cmp2 = Operand(zero_reg);
4896 final_branch_condition = eq;
4898 } else if (type_name->Equals(heap()->boolean_symbol())) {
4899 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4901 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4903 cmp2 = Operand(input);
4904 final_branch_condition = eq;
4906 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
4907 __ LoadRoot(at, Heap::kNullValueRootIndex);
4909 cmp2 = Operand(input);
4910 final_branch_condition = eq;
4912 } else if (type_name->Equals(heap()->undefined_symbol())) {
4913 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4917 __ JumpIfSmi(input, false_label);
4923 cmp2 = Operand(zero_reg);
4924 final_branch_condition = ne;
4926 } else if (type_name->Equals(heap()->function_symbol())) {
4928 __ JumpIfSmi(input, false_label);
4929 __ GetObjectType(input, scratch, input);
4933 final_branch_condition = eq;
4935 } else if (type_name->Equals(heap()->object_symbol())) {
4936 __ JumpIfSmi(input, false_label);
4937 if (!FLAG_harmony_typeof) {
4938 __ LoadRoot(at, Heap::kNullValueRootIndex);
4942 __ GetObjectType(input, input, scratch);
4954 cmp2 = Operand(zero_reg);
4955 final_branch_condition = eq;
4959 cmp2 = Operand(zero_reg);
4960 __ Branch(false_label);
4963 return final_branch_condition;
4967 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4968 Register temp1 = ToRegister(instr->TempAt(0));
4969 int true_block = chunk_->LookupDestination(instr->true_block_id());
4970 int false_block = chunk_->LookupDestination(instr->false_block_id());
4972 EmitIsConstructCall(temp1, scratch0());
4974 EmitBranch(true_block, false_block, eq, temp1,
4979 void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
4980 ASSERT(!temp1.is(temp2));
4985 Label check_frame_marker;
4987 __ Branch(&check_frame_marker, ne, temp2,
4992 __ bind(&check_frame_marker);
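// Pads the instruction stream so that the code emitted since the last
// recorded lazy-deopt point is at least as large as the call sequence the
// deoptimizer will later patch over it.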
4997 void LCodeGen::EnsureSpaceForLazyDeopt() {
5000 int current_pc = masm()->pc_offset();
5002 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
5003 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
5005 while (padding_size > 0) {
5010 last_lazy_deopt_pc_ = masm()->pc_offset();
5014 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5015 EnsureSpaceForLazyDeopt();
5016 ASSERT(instr->HasEnvironment());
5017 LEnvironment* env = instr->environment();
5018 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5019 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5023 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5024 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
5028 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
5029 Register object = ToRegister(instr->object());
5031 Register strict = scratch0();
5033 __ Push(object, key, strict);
5034 ASSERT(instr->HasPointerMap());
5035 LPointerMap* pointers = instr->pointer_map();
5036 RecordPosition(pointers->position());
5037 SafepointGenerator safepoint_generator(
5038 this, pointers, Safepoint::kLazyDeopt);
5039 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
5043 void LCodeGen::DoIn(LIn* instr) {
5047 ASSERT(instr->HasPointerMap());
5048 LPointerMap* pointers = instr->pointer_map();
5049 RecordPosition(pointers->position());
5050 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
5055 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5056 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
5057 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5058 RecordSafepointWithLazyDeopt(
5059 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
5060 ASSERT(instr->HasEnvironment());
5061 LEnvironment* env = instr->environment();
5062 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5066 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5067 class DeferredStackCheck: public LDeferredCode {
5069 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5070 : LDeferredCode(codegen), instr_(instr) { }
5071 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
5072 virtual LInstruction* instr() { return instr_; }
5074 LStackCheck* instr_;
5077 ASSERT(instr->HasEnvironment());
5078 LEnvironment* env = instr->environment();
5081 if (instr->hydrogen()->is_function_entry()) {
5084 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5085 __ Branch(&done, hs, sp, Operand(at));
5086 StackCheckStub stub;
5087 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5088 EnsureSpaceForLazyDeopt();
5090 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5091 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5093 ASSERT(instr->hydrogen()->is_backwards_branch());
5095 DeferredStackCheck* deferred_stack_check =
5096 new(zone()) DeferredStackCheck(this, instr);
5097 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5098 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
5099 EnsureSpaceForLazyDeopt();
5100 __ bind(instr->done_label());
5101 deferred_stack_check->SetExit(instr->done_label());
5102 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5110 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5114 LEnvironment* environment = instr->environment();
5115 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
5116 instr->SpilledDoubleRegisterArray());
5120 ASSERT(!environment->HasBeenRegistered());
5121 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5122 ASSERT(osr_pc_offset_ == -1);
5123 osr_pc_offset_ = masm()->pc_offset();
5127 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5128 Register result = ToRegister(instr->result());
5129 Register object = ToRegister(instr->object());
5130 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5131 DeoptimizeIf(eq, instr->environment(), object, Operand(at));
5133 Register null_value = t1;
5134 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5135 DeoptimizeIf(eq, instr->environment(), object, Operand(null_value));
5138 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
5141 __ GetObjectType(object, a1, a1);
5144 Label use_cache, call_runtime;
5146 __ CheckEnumCache(null_value, &call_runtime);
5149 __ Branch(&use_cache);
5152 __ bind(&call_runtime);
5154 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5158 __ LoadRoot(at, Heap::kMetaMapRootIndex);
5159 DeoptimizeIf(ne, instr->environment(), a1, Operand(at));
5160 __ bind(&use_cache);
5164 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5166 Register result = ToRegister(instr->result());
5167 __ LoadInstanceDescriptors(map, result);
5172 DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
5176 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5177 Register object = ToRegister(instr->value());
5180 DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0()));
5184 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5185 Register object = ToRegister(instr->object());
5187 Register result = ToRegister(instr->result());
5188 Register scratch = scratch0();
5190 Label out_of_object, done;
5195 __ Addu(scratch, object, scratch);
5200 __ bind(&out_of_object);
5203 __ Subu(scratch, result, scratch);
5205 FixedArray::kHeaderSize - kPointerSize));
static const int kCallerFPOffset
static const int kLengthOffset
static const int kBitFieldOffset
static LGap * cast(LInstruction *instr)
const intptr_t kSmiTagMask
static const int kCodeEntryOffset
static const int kMaxAsciiCharCode
static const int kPrototypeOrInitialMapOffset
const char * ToCString(const v8::String::Utf8Value &value)
static int SlotOffset(int index)
virtual void AfterCall() const
void PrintF(const char *format,...)
static Smi * FromInt(int value)
bool IsFastObjectElementsKind(ElementsKind kind)
static const int kElementsKindBitCount
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
// [Expanded flag definitions from src/flag-definitions.h, pulled in by
//  src/flags.cc under FLAG_MODE_DEFINE_DEFAULTS. This stretch declares the
//  language/harmony flags (--use_strict, --es5_readonly, --harmony and its
//  DEFINE_implication entries, --packed_arrays, --smi_only_arrays, ...),
//  the crankshaft/hydrogen flags (--crankshaft, --use_range, --use_gvn,
//  --use_inlining, --max_inlined_source_size, --trace_hydrogen,
//  --deopt_every_n_times, --use_osr, --inline_construct, ...), and the
//  profiler heuristics (--experimental_profiler, --watch_ic_patching,
//  --interrupt_budget, --type_info_threshold, --self_opt_count, ...).]
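The DEFINE_implication entries above (e.g. --harmony implies --harmony_scoping) work because flag-definitions.h is included several times under different FLAG_MODE_* settings, each of which rebinds the DEFINE_* macros; the FLAG_MODE_DEFINE_DEFAULTS define visible in the raw expansion is one such mode. A minimal self-contained sketch of that multi-inclusion pattern, with real flag names but simplified macro names (not the literal V8 source):

#include <cstdio>

// One list, two interpretations: V emits flag variables, I emits implications.
#define FLAG_LIST(V, I)       \
  V(harmony, false)           \
  V(harmony_scoping, false)   \
  I(harmony, harmony_scoping)

// Pass 1: define one global default per flag.
#define DEFINE_BOOL(name, def) bool FLAG_##name = def;
#define SKIP_IMPLICATION(when, then)
FLAG_LIST(DEFINE_BOOL, SKIP_IMPLICATION)
#undef DEFINE_BOOL
#undef SKIP_IMPLICATION

// Pass 2: after command-line parsing, propagate implications.
static void EnforceFlagImplications() {
#define SKIP_BOOL(name, def)
#define DEFINE_IMPLICATION(when, then) \
  if (FLAG_##when) FLAG_##then = true;
  FLAG_LIST(SKIP_BOOL, DEFINE_IMPLICATION)
#undef SKIP_BOOL
#undef DEFINE_IMPLICATION
}

int main() {
  FLAG_harmony = true;  // as if --harmony had been passed
  EnforceFlagImplications();
  std::printf("harmony_scoping = %d\n", FLAG_harmony_scoping);  // prints 1
}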
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
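Embedders do not touch the FLAG_* globals directly; the public API parses the same --name/--noname syntax that d8 accepts and writes the parsed values into these definitions. A hedged sketch against the v8.h API of this era (the include path is an assumption about your checkout layout):

#include <cstring>
#include "v8.h"  // include path assumed; adjust to your build

int main(int argc, char* argv[]) {
  // Consume any recognized --flags from our own argv (remove_flags = true).
  v8::V8::SetFlagsFromCommandLine(&argc, argv, true);

  // Force a few of the flags defined above, e.g. for profiling a run.
  const char flags[] = "--code_comments --trace_opt --noprof_auto";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(std::strlen(flags)));
  return 0;
}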