#if defined(V8_TARGET_ARCH_X64)

class SafepointGenerator : public CallWrapper {
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;

bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  return GeneratePrologue() &&
         GenerateDeferredCode() &&
         GenerateJumpTable() &&
         GenerateSafepointTable();

void LCodeGen::FinishCode(Handle<Code> code) {
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);

void LChunkBuilder::Abort(const char* reason) {
  info()->set_bailout_reason(reason);

void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  int length = builder.position();
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());

bool LCodeGen::GeneratePrologue() {
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
  if (!info_->is_classic_mode() || info_->is_native()) {
    __ j(zero, &ok, Label::kNear);
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
  int slots = GetStackSlotCount();
  if (FLAG_debug_code) {
      const int kPageSize = 4 * KB;
           offset -= kPageSize) {
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
      FastNewContextStub stub(heap_slots);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        __ movq(rax, Operand(rbp, parameter_offset));
        __ movq(Operand(rsi, context_offset), rax);
    Comment(";;; End allocate local context");
    __ CallRuntime(Runtime::kTraceEnter, 0);
  return !is_aborted();

bool LCodeGen::GenerateBody() {
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      emit_instructions = !label->HasReplacement();
    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
  return !is_aborted();

bool LCodeGen::GenerateJumpTable() {
  for (int i = 0; i < jump_table_.length(); i++) {
    __ bind(&jump_table_[i].label);
  return !is_aborted();

bool LCodeGen::GenerateDeferredCode() {
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      __ jmp(code->exit());
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();

bool LCodeGen::GenerateSafepointTable() {
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();

XMMRegister LCodeGen::ToDoubleRegister(int index) const {

XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());

bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsInteger32();

bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsTagged();

int LCodeGen::ToInteger32(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(constant->HasInteger32Value());
  return constant->Integer32Value();

double LCodeGen::ToDouble(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  ASSERT(constant->HasDoubleValue());
  return constant->DoubleValue();

Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return constant->handle();

Operand LCodeGen::ToOperand(LOperand* op) const {
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();

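// WriteTranslation (below) recursively emits a frame description for each
// environment in the inlining chain: the outer environment is written first,
// then a frame of the appropriate kind (JS, construct stub, getter/setter
// stub, or arguments adaptor) is begun and every value in the environment is
// added via AddToTranslation.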
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation,
                                int* arguments_index,
                                int* arguments_count) {
  if (environment == NULL) return;
  int translation_size = environment->values()->length();
  int height = translation_size - environment->parameter_count();
  *arguments_index = -environment->parameter_count();
  *arguments_count = environment->parameter_count();
  WriteTranslation(environment->outer(),
  int closure_id = *info()->closure() != *environment->closure()
      ? DefineDeoptimizationLiteral(environment->closure())
      : Translation::kSelfLiteralId;
  switch (environment->frame_type()) {
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      translation->BeginConstructStubFrame(closure_id, translation_size);
      ASSERT(translation_size == 1);
      translation->BeginGetterStubFrame(closure_id);
      ASSERT(translation_size == 2);
      translation->BeginSetterStubFrame(closure_id);
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
  if (environment->entry() != NULL &&
      environment->entry()->arguments_pushed()) {
    *arguments_index = *arguments_index < 0
        ? GetStackSlotCount()
        : *arguments_index + *arguments_count;
    *arguments_count = environment->entry()->arguments_count() + 1;
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i),
                         environment->HasUint32ValueAt(i),
                 value->IsDoubleRegister() &&
                 environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
                         environment->spilled_double_registers()[value->index()],
    AddToTranslation(translation,
                     environment->HasTaggedValueAt(i),
                     environment->HasUint32ValueAt(i),

void LCodeGen::AddToTranslation(Translation* translation,
                                int arguments_count) {
    translation->StoreArgumentsObject(arguments_index, arguments_count);
  } else if (op->IsStackSlot()) {
      translation->StoreStackSlot(op->index());
    } else if (is_uint32) {
      translation->StoreUint32StackSlot(op->index());
      translation->StoreInt32StackSlot(op->index());
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
      translation->StoreRegister(reg);
    } else if (is_uint32) {
      translation->StoreUint32Register(reg);
      translation->StoreInt32Register(reg);
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    int src_index = DefineDeoptimizationLiteral(constant->handle());
    translation->StoreLiteral(src_index);

void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               SafepointMode safepoint_mode,
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {

void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);

void LCodeGen::CallRuntime(const Runtime::Function* function,
                           LInstruction* instr) {
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);

                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);

void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
    Translation translation(&translations_, frame_count, jsframe_count, zone());
    WriteTranslation(environment, &translation, &args_index, &args_count);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment, environment->zone());

void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
    Abort("bailout was not prepared");
  if (jump_table_.is_empty() ||
      jump_table_.last().address != entry) {
    jump_table_.Add(JumpTableEntry(entry), zone());
  __ j(cc, &jump_table_.last().label);

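// The deoptimization input data built below packages what the deoptimizer
// needs at runtime: the translation byte array, the literal array, the OSR
// AST id, and, per deopt point, the AST id, translation index and arguments
// stack height.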
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);
  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  data->SetLiteralArray(*literals);
  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, env->ast_id());
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
  code->set_deoptimization_data(*data);

int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  deoptimization_literals_.Add(literal, zone());

void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);
  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();
  for (int i = 0, length = inlined_closures->length();
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  inlined_function_count_ = deoptimization_literals_.length();

void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode, int argc) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), argc, Safepoint::kLazyDeopt);

void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(kind == expected_safepoint_kind_);
  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer), zone());
  if (kind & Safepoint::kWithRegisters) {
    safepoint.DefinePointerRegister(rsi, zone());

void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);

void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
  RecordSafepoint(&empty_pointers, deopt_mode);

void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);

void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);

void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
    Comment(";;; B%d", label->block_id());
  __ bind(label->label());
  current_block_ = label->block_id();

void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);

void LCodeGen::DoGap(LGap* gap) {
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);

void LCodeGen::DoInstructionGap(LInstructionGap* instr) {

void LCodeGen::DoParameter(LParameter* instr) {

void LCodeGen::DoCallStub(LCallStub* instr) {
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    case CodeStub::RegExpExec: {
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    case CodeStub::StringAdd: {
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {

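// DoModI (below) has three paths: a mask with (divisor - 1) when the divisor
// is a power of two (with the dividend's sign handled separately), an
// unrolled repeated-subtraction loop for small positive operands, and a slow
// path; a zero right operand deoptimizes.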
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->left());
    if (divisor < 0) divisor = -divisor;
    Label positive_dividend, done;
    __ testl(dividend, dividend);
    __ j(not_sign, &positive_dividend, Label::kNear);
    __ andl(dividend, Immediate(divisor - 1));
    __ jmp(&done, Label::kNear);
    __ bind(&positive_dividend);
    __ andl(dividend, Immediate(divisor - 1));
    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
    Register left_reg = ToRegister(instr->left());
    Register right_reg = ToRegister(instr->right());
    Register result_reg = ToRegister(instr->result());
      __ testl(right_reg, right_reg);
      DeoptimizeIf(zero, instr->environment());
    __ testl(left_reg, left_reg);
    __ j(zero, &remainder_eq_dividend, Label::kNear);
    __ j(sign, &slow, Label::kNear);
    __ testl(right_reg, right_reg);
    __ bind(&both_positive);
    __ cmpl(left_reg, right_reg);
    __ j(less, &remainder_eq_dividend, Label::kNear);
    __ movl(scratch, right_reg);
    __ subl(scratch, Immediate(1));
    __ testl(scratch, right_reg);
    __ j(not_zero, &do_subtraction, Label::kNear);
    __ andl(left_reg, scratch);
    __ jmp(&remainder_eq_dividend, Label::kNear);
    __ bind(&do_subtraction);
    const int kUnfolds = 3;
    __ movl(scratch, left_reg);
    for (int i = 0; i < kUnfolds; i++) {
      __ subl(left_reg, right_reg);
      __ cmpl(left_reg, right_reg);
      __ j(less, &remainder_eq_dividend, Label::kNear);
    __ movl(left_reg, scratch);
    __ testl(left_reg, left_reg);
    __ testl(result_reg, result_reg);
    __ bind(&positive_left);
    __ jmp(&done, Label::kNear);
    __ bind(&remainder_eq_dividend);
    __ movl(result_reg, left_reg);

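// DoMathFloorOfDiv (below) divides by a known constant: powers of two are
// handled with an arithmetic shift, other divisors are multiplied by a
// rounded 2^(32+b) / |divisor| reciprocal and the product is shifted back
// down; the -0 and overflow cases deoptimize.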
void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
  ASSERT(instr->right()->IsConstantOperand());
  const Register dividend = ToRegister(instr->left());
  const Register result = ToRegister(instr->result());
    if (!result.is(dividend)) {
      __ movl(result, dividend);
    if (!result.is(dividend)) {
      __ movl(result, dividend);
    DeoptimizeIf(zero, instr->environment());
    DeoptimizeIf(overflow, instr->environment());
  uint32_t divisor_abs = abs(divisor);
      __ movsxlq(result, dividend);
      DeoptimizeIf(zero, instr->environment());
      __ sar(result, Immediate(power));
      if (!result.is(dividend)) {
        __ movl(result, dividend);
      __ sarl(result, Immediate(power));
    unsigned b = 31 - CompilerIntrinsics::CountLeadingZeros(divisor_abs);
    unsigned shift = 32 + b;
    double multiplier_f =
        static_cast<double>(static_cast<uint64_t>(1) << shift) / divisor_abs;
    if (multiplier_f - floor(multiplier_f) < 0.5) {
      multiplier = static_cast<int64_t>(floor(multiplier_f));
      multiplier = static_cast<int64_t>(floor(multiplier_f)) + 1;
           multiplier < (static_cast<int64_t>(1) << 32));
    __ movsxlq(reg1, dividend);
    DeoptimizeIf(zero, instr->environment());
    __ imul(reg2, reg1);
    __ addq(reg2, Immediate(1 << 30));
    __ sar(reg2, Immediate(shift));

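// DoDivI (below) uses idivl and deoptimizes on the cases integer division
// cannot represent: a zero divisor, 0 / negative (which would be -0),
// kMinInt / -1 (overflow), and a non-zero remainder when the result must be
// exact.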
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->right();
  Register left_reg = rax;
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr->environment());
    Label left_not_zero;
    __ testl(left_reg, left_reg);
    __ j(not_zero, &left_not_zero, Label::kNear);
    __ testl(right_reg, right_reg);
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
    Label left_not_min_int;
    __ j(not_zero, &left_not_min_int, Label::kNear);
    __ cmpl(right_reg, Immediate(-1));
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  __ idivl(right_reg);
    DeoptimizeIf(not_zero, instr->environment());

void LCodeGen::DoMulI(LMulI* instr) {
  LOperand* right = instr->right();
  if (right->IsConstantOperand()) {
    if (right_value == -1) {
    } else if (right_value == 0) {
      __ xorl(left, left);
    } else if (right_value == 2) {
      __ addl(left, left);
    } else if (!can_overflow) {
      switch (right_value) {
          __ leal(left, Operand(left, left, times_2, 0));
          __ shll(left, Immediate(2));
          __ leal(left, Operand(left, left, times_4, 0));
          __ shll(left, Immediate(3));
          __ leal(left, Operand(left, left, times_8, 0));
          __ shll(left, Immediate(4));
          __ imull(left, left, Immediate(right_value));
      __ imull(left, left, Immediate(right_value));
  } else if (right->IsStackSlot()) {
    __ imull(left, ToOperand(right));
    DeoptimizeIf(overflow, instr->environment());
    __ testl(left, left);
    if (right->IsConstantOperand()) {
        DeoptimizeIf(less, instr->environment());
    } else if (right->IsStackSlot()) {
      DeoptimizeIf(sign, instr->environment());
      DeoptimizeIf(sign, instr->environment());

void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsConstantOperand()) {
    switch (instr->op()) {
      case Token::BIT_AND:
      case Token::BIT_XOR:
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
      case Token::BIT_XOR:
    ASSERT(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
      case Token::BIT_XOR:

void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    switch (instr->op()) {
        if (instr->can_deopt()) {
          DeoptimizeIf(negative, instr->environment());
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
        if (shift_count != 0) {
        if (shift_count == 0 && instr->can_deopt()) {
          DeoptimizeIf(negative, instr->environment());
        if (shift_count != 0) {

void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  if (right->IsConstantOperand()) {
  } else if (right->IsRegister()) {
    DeoptimizeIf(overflow, instr->environment());

void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());

void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = BitCast<uint64_t, double>(v);
    __ Set(tmp, int_val);

void LCodeGen::DoConstantT(LConstantT* instr) {
  Handle<Object> value = instr->value();
  if (value->IsSmi()) {
                Handle<HeapObject>::cast(value));

void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());

void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());

void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
  Register result = ToRegister(instr->result());
  __ EnumLength(result, map);

void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());

void LCodeGen::DoValueOf(LValueOf* instr) {
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  __ JumpIfSmi(input, &done, Label::kNear);

void LCodeGen::DoDateField(LDateField* instr) {
  Register result = ToRegister(instr->result());
  Smi* index = instr->index();
  Label runtime, done, not_date_object;
  ASSERT(object.is(result));
  Condition cc = masm()->CheckSmi(object);
  DeoptimizeIf(cc, instr->environment());
  DeoptimizeIf(not_equal, instr->environment());
  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ PrepareCallCFunction(2);
    __ movq(rcx, object);
    __ movq(rdi, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->value();
  ASSERT(input->Equals(instr->result()));

void LCodeGen::DoThrow(LThrow* instr) {
  CallRuntime(Runtime::kThrow, 1, instr);
  if (FLAG_debug_code) {
    Comment("Unreachable code.");

void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  if (right->IsConstantOperand()) {
  } else if (right->IsRegister()) {
    DeoptimizeIf(overflow, instr->environment());

void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  if (instr->hydrogen()->representation().IsInteger32()) {
    if (right->IsConstantOperand()) {
      Immediate right_imm =
      __ cmpq(left_reg, right_imm);
      __ j(condition, &return_left, Label::kNear);
      __ movq(left_reg, right_imm);
    } else if (right->IsRegister()) {
      __ cmpq(left_reg, right_reg);
      __ j(condition, &return_left, Label::kNear);
      __ movq(left_reg, right_reg);
      Operand right_op = ToOperand(right);
      __ cmpq(left_reg, right_op);
      __ j(condition, &return_left, Label::kNear);
      __ movq(left_reg, right_op);
    __ bind(&return_left);
    ASSERT(instr->hydrogen()->representation().IsDouble());
    Label check_nan_left, check_zero, return_left, return_right;
    XMMRegister left_reg = ToDoubleRegister(left);
    XMMRegister right_reg = ToDoubleRegister(right);
    __ ucomisd(left_reg, right_reg);
    __ j(equal, &check_zero, Label::kNear);
    __ j(condition, &return_left, Label::kNear);
    __ jmp(&return_right, Label::kNear);
    __ bind(&check_zero);
    XMMRegister xmm_scratch = xmm0;
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(left_reg, xmm_scratch);
    __ orpd(left_reg, right_reg);
    __ addsd(left_reg, right_reg);
    __ jmp(&return_left, Label::kNear);
    __ bind(&check_nan_left);
    __ ucomisd(left_reg, left_reg);
    __ bind(&return_right);
    __ movsd(left_reg, right_reg);
    __ bind(&return_left);

void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->left());
  XMMRegister right = ToDoubleRegister(instr->right());
  XMMRegister result = ToDoubleRegister(instr->result());
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
      __ addsd(left, right);
      __ subsd(left, right);
      __ mulsd(left, right);
      __ divsd(left, right);
      __ PrepareCallCFunction(2);
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);

void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;

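// EmitBranch (below) avoids a jump whenever possible: identical targets
// collapse into EmitGoto, a target that is the fall-through block needs only
// a single conditional jump, and only the general case emits a conditional
// jump to the true block followed by an unconditional jump to the false
// block.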
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);
  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));

void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->value());
    EmitBranch(true_block, false_block, not_equal);
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      EmitBranch(true_block, false_block, not_equal);
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);
      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        DeoptimizeIf(zero, instr->environment());
      if (expected.NeedsMap()) {
        if (expected.CanBeUndetectable()) {
        __ jmp(false_label);
        __ bind(&not_string);
        Label not_heap_number;
        __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
        __ bind(&not_heap_number);

void LCodeGen::EmitGoto(int block) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    __ jmp(chunk_->GetAssemblyLabel(block));

void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());

    case Token::EQ_STRICT:
    case Token::INSTANCEOF:

void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    EmitGoto(next_block);
    if (instr->is_double()) {
      __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
      if (right->IsConstantOperand()) {
      } else if (left->IsConstantOperand()) {
        if (right->IsRegister()) {
          __ cmpl(ToOperand(right), Immediate(value));
        if (right->IsRegister()) {
    EmitBranch(true_block, false_block, cc);

void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  __ cmpq(left, right);
  EmitBranch(true_block, false_block, equal);

void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  __ cmpq(left, Immediate(instr->hydrogen()->right()));
  EmitBranch(true_block, false_block, equal);

void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  if (instr->hydrogen()->representation().IsSpecialization() ||
      instr->hydrogen()->type().IsSmi()) {
    EmitGoto(false_block);
  int true_block = chunk_->LookupDestination(instr->true_block_id());
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ CompareRoot(reg, nil_value);
    EmitBranch(true_block, false_block, equal);
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ CompareRoot(reg, other_nil_value);
    __ JumpIfSmi(reg, false_label);
    Register scratch = ToRegister(instr->temp());
    EmitBranch(true_block, false_block, not_zero);

Condition LCodeGen::EmitIsObject(Register input,
                                 Label* is_not_object,
  __ JumpIfSmi(input, is_not_object);
  __ CompareRoot(input, Heap::kNullValueRootIndex);

void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
  Condition true_cond = EmitIsObject(reg, false_label, true_label);
  EmitBranch(true_block, false_block, true_cond);

Condition LCodeGen::EmitIsString(Register input,
                                 Label* is_not_string) {
  __ JumpIfSmi(input, is_not_string);
  Condition cond = masm_->IsObjectStringType(input, temp1, temp1);

void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
  Condition true_cond = EmitIsString(reg, temp, false_label);
  EmitBranch(true_block, false_block, true_cond);

void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  if (instr->value()->IsRegister()) {
    is_smi = masm()->CheckSmi(input);
    Operand input = ToOperand(instr->value());
    is_smi = masm()->CheckSmi(input);
  EmitBranch(true_block, false_block, is_smi);

void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  EmitBranch(true_block, false_block, not_zero);

void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  Condition condition = TokenToCondition(op, false);
  EmitBranch(true_block, false_block, condition);

static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {

static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
  if (from == to) return equal;

void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
  __ JumpIfSmi(input, false_label);
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));

void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register result = ToRegister(instr->result());
  __ AssertString(input);
  __ IndexFromHash(result, result);

void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  EmitBranch(true_block, false_block, equal);

void LCodeGen::EmitClassOfTest(Label* is_true,
                               Handle<String> class_name,
  ASSERT(!input.is(temp2));
  __ JumpIfSmi(input, is_false);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
  if (class_name->IsEqualTo(CStrVector("Object"))) {
  ASSERT(class_name->IsSymbol());
  __ Cmp(temp, class_name);

void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Handle<String> class_name = instr->hydrogen()->class_name();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
  EmitBranch(true_block, false_block, equal);

void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();
  EmitBranch(true_block, false_block, equal);

void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  Label true_value, done;
  __ j(zero, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);

void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    virtual LInstruction* instr() { return instr_; }
    Label* map_check() { return &map_check_; }
    LInstanceOfKnownGlobal* instr_;

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
  Label done, false_result;
  Register object = ToRegister(instr->value());
  __ JumpIfSmi(object, &false_result);
  __ bind(deferred->map_check());
  Handle<JSGlobalPropertyCell> cache_cell =
      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
  __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
  Label end_of_patched_code;
  __ bind(&end_of_patched_code);
  __ bind(&cache_miss);
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ j(equal, &false_result, Label::kNear);
  __ bind(&false_result);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ bind(deferred->exit());

void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
  PushSafepointRegistersScope scope(this);
  InstanceofStub stub(flags);
  __ PushHeapObject(instr->function());
  static const int kAdditionalDelta = 10;
      masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
  __ push_imm32(delta);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  RECORD_SAFEPOINT_WITH_REGISTERS,
  ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ bind(&load_false);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);

void LCodeGen::DoCmpT(LCmpT* instr) {
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  Condition condition = TokenToCondition(op, false);
  Label true_value, done;
  __ j(condition, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);

void LCodeGen::DoReturn(LReturn* instr) {
  __ CallRuntime(Runtime::kTraceExit, 1);

void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ LoadGlobalCell(result, instr->hydrogen()->cell());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());

void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  __ Move(rcx, instr->name());
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
                                               RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);

void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  __ movq(Operand(cell, 0), value);

void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  __ Move(rcx, instr->name());
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);

void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr->environment());
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);

void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Label skip_assignment;
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr->environment());
  __ movq(target, value);
  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    Register scratch = ToRegister(instr->temp());
    __ RecordWriteContextSlot(context,
  __ bind(&skip_assignment);

void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
    __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));

void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Handle<String> name,
                                               LEnvironment* env) {
  LookupResult lookup(isolate());
  type->LookupDescriptor(NULL, *name, &lookup);
  ASSERT(lookup.IsFound() || lookup.IsCacheable());
  if (lookup.IsField()) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
      __ movq(result, FieldOperand(object, offset + type->instance_size()));
  } else if (lookup.IsConstantFunction()) {
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    __ LoadHeapObject(result, function);
    Heap* heap = type->GetHeap();
    while (*current != heap->null_value()) {
      __ LoadHeapObject(result, current);
                  Handle<Map>(current->map()));
    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

static bool CompactEmit(SmallMapList* list,
                        Handle<String> name,
  Handle<Map> map = list->at(i);
  if (map->HasElementsTransition()) return false;
  LookupResult lookup(isolate);
  map->LookupDescriptor(NULL, *name, &lookup);
  return lookup.IsField() || lookup.IsConstantFunction();

void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  int map_count = instr->hydrogen()->types()->length();
  bool need_generic = instr->hydrogen()->need_generic();
  if (map_count == 0 && !need_generic) {
  Handle<String> name = instr->hydrogen()->name();
  bool all_are_compact = true;
  for (int i = 0; i < map_count; ++i) {
    if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
      all_are_compact = false;
  for (int i = 0; i < map_count; ++i) {
    bool last = (i == map_count - 1);
    Handle<Map> map = instr->hydrogen()->types()->at(i);
    if (last && !need_generic) {
      DeoptimizeIf(not_equal, instr->environment());
      __ bind(&check_passed);
      EmitLoadFieldOrConstantFunction(
          result, object, map, name, instr->environment());
      bool compact = all_are_compact ? true :
          CompactEmit(instr->hydrogen()->types(), name, i, isolate());
      __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
      __ bind(&check_passed);
      EmitLoadFieldOrConstantFunction(
          result, object, map, name, instr->environment());
      __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);

void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  __ Move(rcx, instr->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());
  DeoptimizeIf(not_equal, instr->environment());
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr->environment());
  __ jmp(&done, Label::kNear);
  __ bind(&non_instance);

void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->object());
  if (FLAG_debug_code) {
    Label done, ok, fail;
                   Heap::kFixedArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
    Register temp((result.is(rax)) ? rbx : rax);
    __ j(less, &fail, Label::kNear);
    __ j(less, &fail, Label::kNear);
    __ Abort("Check for fast or external elements failed");

void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->object());

void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register result = ToRegister(instr->result());
  if (instr->index()->IsRegister()) {
    __ subl(length, ToOperand(instr->index()));

void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register result = ToRegister(instr->result());
  LOperand* key = instr->key();
  if (!key->IsConstantOperand()) {
    if (instr->hydrogen()->key()->representation().IsTagged()) {
      __ SmiToInteger64(key_reg, key_reg);
    } else if (instr->hydrogen()->IsDehoisted()) {
      __ movsxlq(key_reg, key_reg);
          BuildFastArrayOperand(instr->elements(),
                                instr->additional_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
      __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
      DeoptimizeIf(equal, instr->environment());

void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  XMMRegister result(ToDoubleRegister(instr->result()));
  LOperand* key = instr->key();
  if (!key->IsConstantOperand()) {
    if (instr->hydrogen()->key()->representation().IsTagged()) {
      __ SmiToInteger64(key_reg, key_reg);
    } else if (instr->hydrogen()->IsDehoisted()) {
      __ movsxlq(key_reg, key_reg);
  if (instr->hydrogen()->RequiresHoleCheck()) {
    Operand hole_check_operand = BuildFastArrayOperand(
        instr->additional_index());
    DeoptimizeIf(equal, instr->environment());
  Operand double_load_operand = BuildFastArrayOperand(
      instr->additional_index());
  __ movsd(result, double_load_operand);

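// BuildFastArrayOperand (below) forms the memory operand for a keyed access:
// for a constant key it folds (key + additional_index) << shift into the
// displacement (aborting if the constant is too large); otherwise the key
// register supplies a scaled index.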
Operand LCodeGen::BuildFastArrayOperand(
    LOperand* elements_pointer,
    uint32_t additional_index) {
  Register elements_pointer_reg = ToRegister(elements_pointer);
  if (key->IsConstantOperand()) {
    if (constant_value & 0xF0000000) {
      Abort("array index constant value too big");
    return Operand(elements_pointer_reg,
                   ((constant_value + additional_index) << shift_size)
    return Operand(elements_pointer_reg,
                   offset + (additional_index << shift_size));

void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  LOperand* key = instr->key();
  if (!key->IsConstantOperand()) {
    if (instr->hydrogen()->key()->representation().IsTagged()) {
      __ SmiToInteger64(key_reg, key_reg);
    } else if (instr->hydrogen()->IsDehoisted()) {
      __ movsxlq(key_reg, key_reg);
  Operand operand(BuildFastArrayOperand(
      instr->external_pointer(),
      instr->additional_index()));
    XMMRegister result(ToDoubleRegister(instr->result()));
    __ movss(result, operand);
    __ cvtss2sd(result, result);
    __ movsd(ToDoubleRegister(instr->result()), operand);
    Register result(ToRegister(instr->result()));
    switch (elements_kind) {
        __ movsxbq(result, operand);
        __ movzxbq(result, operand);
        __ movsxwq(result, operand);
        __ movzxwq(result, operand);
        __ movsxlq(result, operand);
        __ movl(result, operand);
        __ testl(result, result);
        DeoptimizeIf(negative, instr->environment());
      case FAST_DOUBLE_ELEMENTS:

void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->from_inlined()) {
    __ lea(result, Operand(rsp, -2 * kPointerSize));
    Label done, adapted;
    __ j(equal, &adapted, Label::kNear);
    __ movq(result, rbp);
    __ jmp(&done, Label::kNear);

void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());
  if (instr->elements()->IsRegister()) {
    __ cmpq(rbp, ToOperand(instr->elements()));
  __ movl(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done, Label::kNear);
  __ SmiToInteger32(result,

void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Label global_object, receiver_ok;
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object, Label::kNear);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object, Label::kNear);
  DeoptimizeIf(is_smi, instr->environment());
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok, Label::kNear);
  __ bind(&global_object);
  __ bind(&receiver_ok);

void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpq(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr->environment());
  __ movq(receiver, length);
  __ testl(length, length);
  __ j(zero, &invoke, Label::kNear);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount actual(rax);

void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->value();
  EmitPushTaggedOperand(argument);

void LCodeGen::DoDrop(LDrop* instr) {
  __ Drop(instr->count());

void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());

void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, rsi);

void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());

void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  __ PushHeapObject(instr->hydrogen()->pairs());
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);

void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());

void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());

void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 LInstruction* instr,
                                 RDIState rdi_state) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  if (can_invoke_directly) {
    if (rdi_state == RDI_UNINITIALIZED) {
      __ LoadHeapObject(rdi, function);
    if (!function->NeedsArgumentsAdaption()) {
    __ SetCallKind(rcx, call_kind);
    if (*function == *info()->closure()) {
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
        this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);

void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  CallKnownFunction(instr->function(),

void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->value());
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());
  Register tmp = input_reg.is(rax) ? rcx : rax;
  PushSafepointRegistersScope scope(this);
  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
  __ bind(&allocated);
  __ shl(tmp2, Immediate(1));
  __ shr(tmp2, Immediate(1));
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->value());
  __ testl(input_reg, input_reg);
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);

void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    virtual LInstruction* instr() { return instr_; }
    LUnaryMathOperation* instr_;

  ASSERT(instr->value()->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->value());
    __ xorps(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andpd(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
    DeferredMathAbsTaggedHeapNumber* deferred =
        new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->value());
    __ JumpIfNotSmi(input_reg, deferred->entry());
    __ SmiToInteger32(input_reg, input_reg);
    EmitIntegerMathAbs(instr);
    __ Integer32ToSmi(input_reg, input_reg);
    __ bind(deferred->exit());

void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->value());
    CpuFeatures::Scope scope(SSE4_1);
      __ movq(output_reg, input_reg);
      __ subq(output_reg, Immediate(1));
      DeoptimizeIf(overflow, instr->environment());
    __ cvttsd2si(output_reg, xmm_scratch);
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
    Label negative_sign, done;
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(input_reg, xmm_scratch);
    __ j(below, &negative_sign, Label::kNear);
      Label positive_sign;
      __ j(above, &positive_sign, Label::kNear);
      __ movmskpd(output_reg, input_reg);
      __ testq(output_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ Set(output_reg, 0);
      __ bind(&positive_sign);
    __ cvttsd2si(output_reg, input_reg);
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
    __ jmp(&done, Label::kNear);
    __ bind(&negative_sign);
    __ cvttsd2si(output_reg, input_reg);
    __ cvtlsi2sd(xmm_scratch, output_reg);
    __ ucomisd(input_reg, xmm_scratch);
    __ j(equal, &done, Label::kNear);
    __ subl(output_reg, Immediate(1));
    DeoptimizeIf(overflow, instr->environment());

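// DoMathRound (below) handles inputs of at least one half by adding the 0.5
// constant held in xmm_scratch and truncating with cvttsd2si; a truncated
// result of 0x80000000 (out of int32 range) deoptimizes, and inputs below one
// half deoptimize if their sign bit is set, since they would round to -0.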
3221 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3222 const XMMRegister xmm_scratch =
xmm0;
3223 Register output_reg =
ToRegister(instr->result());
3224 XMMRegister input_reg = ToDoubleRegister(instr->value());
3231 __ ucomisd(xmm_scratch, input_reg);
3233 __ j(above, &below_half, Label::kNear);
3238 __ addsd(xmm_scratch, input_reg);
3241 __ cvttsd2si(output_reg, xmm_scratch);
3243 __ cmpl(output_reg, Immediate(0x80000000));
3244 DeoptimizeIf(equal, instr->environment());
3247 __ bind(&below_half);
3250 __ movq(output_reg, input_reg);
3251 __ testq(output_reg, output_reg);
3252 DeoptimizeIf(negative, instr->environment());
3259 __ ucomisd(input_reg, xmm_scratch);
3260 DeoptimizeIf(below, instr->environment());
3262 __ xorl(output_reg, output_reg);
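// NOTE (inferred summary, assuming the elided setup loads 0.5 into
// xmm_scratch): Math.round is implemented as truncate(input + 0.5) for inputs
// of at least 0.5 (the addsd/cvttsd2si pair above), with the usual 0x80000000
// overflow check. Inputs below 0.5 take the below_half path, which produces 0
// but deoptimizes either on any negative input (when -0 must be preserved,
// the DeoptimizeIf(negative) case) or on inputs below -0.5 (the
// DeoptimizeIf(below) case).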
3268 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3269 XMMRegister input_reg = ToDoubleRegister(instr->value());
3270 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3271 __ sqrtsd(input_reg, input_reg);
3275 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3276 XMMRegister xmm_scratch = xmm0;
3277 XMMRegister input_reg = ToDoubleRegister(instr->value());
3278 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3288 __ ucomisd(xmm_scratch, input_reg);
3292 __ j(carry, &sqrt, Label::kNear);
3294 __ xorps(input_reg, input_reg);
3295 __ subsd(input_reg, xmm_scratch);
3296 __ jmp(&done, Label::kNear);
3300 __ xorps(xmm_scratch, xmm_scratch);
3301 __ addsd(input_reg, xmm_scratch);
3302 __ sqrtsd(input_reg, input_reg);
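// NOTE (inferred; the constant loaded into xmm_scratch in the elided lines is
// assumed to be -Infinity): Math.pow(x, 0.5) differs from sqrt(x) at the
// edges. The ucomisd/j(carry) pair appears to special-case -Infinity, for
// which pow must return +Infinity (computed as 0.0 - (-Infinity)), and the
// addsd of +0.0 before sqrtsd turns a -0 input into +0 so the result is +0 as
// the spec requires.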
3307 void LCodeGen::DoPower(LPower* instr) {
3308 Representation exponent_type = instr->hydrogen()->right()->representation();
3314 Register exponent = rdx;
3316 Register exponent = rdi;
3318 ASSERT(!instr->right()->IsRegister() ||
3320 ASSERT(!instr->right()->IsDoubleRegister() ||
3321 ToDoubleRegister(instr->right()).is(xmm1));
3322 ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
3323 ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
3325 if (exponent_type.IsTagged()) {
3327 __ JumpIfSmi(exponent, &no_deopt);
3329 DeoptimizeIf(not_equal, instr->environment());
3333 } else if (exponent_type.IsInteger32()) {
3337 ASSERT(exponent_type.IsDouble());
3344 void LCodeGen::DoRandom(LRandom* instr) {
3345 class DeferredDoRandom: public LDeferredCode {
3347 DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
3348 : LDeferredCode(codegen), instr_(instr) { }
3349 virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
3350 virtual LInstruction* instr() { return instr_; }
3355 DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
3359 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3365 Register global_object = rcx;
3368 Register global_object = rdi;
3371 static const int kSeedSize = sizeof(uint32_t);
3374 __ movq(global_object,
3376 static const int kRandomSeedOffset =
3385 __ j(zero, deferred->entry());
3392 __ andl(rdx, Immediate(0xFFFF));
3393 __ imull(rdx, rdx, Immediate(18273));
3394 __ shrl(rax, Immediate(16));
3401 __ andl(rdx, Immediate(0xFFFF));
3402 __ imull(rdx, rdx, Immediate(36969));
3403 __ shrl(rcx, Immediate(16));
3409 __ shll(rax, Immediate(14));
3410 __ andl(rcx, Immediate(0x3FFFF));
3413 __ bind(deferred->exit());
3417 __ movl(rcx, Immediate(0x49800000));
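// NOTE (reconstruction from the constants above; the elided lines are
// assumed): this looks like a per-context multiply-with-carry generator. Two
// 16-bit states are advanced roughly as
//   state0 = 18273 * (state0 & 0xFFFF) + (state0 >> 16);
//   state1 = 36969 * (state1 & 0xFFFF) + (state1 >> 16);
// and combined into 32 random bits as (state0 << 14) + (state1 & 0x3FFFF).
// The 0x49800000 constant is the single-precision encoding of 1.0 x 2^20 and
// is presumably used by the following (elided) code to map those bits onto a
// double in [0, 1).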
3426 void LCodeGen::DoDeferredRandom(LRandom* instr) {
3427 __ PrepareCallCFunction(1);
3428 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3434 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3435 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3438 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3442 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
3443 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3446 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3450 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3451 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3454 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3458 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3459 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3462 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3466 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3467 switch (instr->op()) {
3481 DoMathPowHalf(instr);
3502 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3504 ASSERT(instr->HasPointerMap());
3506 if (instr->known_function().is_null()) {
3507 LPointerMap* pointers = instr->pointer_map();
3508 RecordPosition(pointers->position());
3509 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3510 ParameterCount count(instr->arity());
3514 CallKnownFunction(instr->known_function(),
3518 RDI_CONTAINS_TARGET);
3523 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3527 int arity = instr->arity();
3529 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
3530 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3535 void LCodeGen::DoCallNamed(LCallNamed* instr) {
3538 int arity = instr->arity();
3539 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3541 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3542 __ Move(rcx, instr->name());
3543 CallCode(ic, mode, instr);
3548 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3552 int arity = instr->arity();
3554 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3559 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3561 int arity = instr->arity();
3562 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
3564 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3565 __ Move(rcx, instr->name());
3566 CallCode(ic, mode, instr);
3571 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3573 CallKnownFunction(instr->target(),
3581 void LCodeGen::DoCallNew(LCallNew* instr) {
3586 __ Set(rax, instr->arity());
3587 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
3591 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3592 CallRuntime(instr->function(), instr->arity(), instr);
3596 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3597 Register object = ToRegister(instr->object());
3599 int offset = instr->offset();
3601 if (!instr->transition().is_null()) {
3602 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
3604 instr->transition());
3610 __ RecordWriteField(object,
3621 HType type = instr->hydrogen()->value()->type();
3624 if (instr->is_in_object()) {
3626 if (instr->hydrogen()->NeedsWriteBarrier()) {
3629 __ RecordWriteField(object,
3641 if (instr->hydrogen()->NeedsWriteBarrier()) {
3644 __ RecordWriteField(temp,
3656 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3660 __ Move(rcx, instr->hydrogen()->name());
3661 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3662 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3663 : isolate()->builtins()->StoreIC_Initialize();
3664 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3668 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3669 LStoreKeyedSpecializedArrayElement* instr) {
3671 LOperand* key = instr->key();
3672 if (!key->IsConstantOperand()) {
3678 if (instr->hydrogen()->key()->representation().IsTagged()) {
3679 __ SmiToInteger64(key_reg, key_reg);
3680 } else if (instr->hydrogen()->IsDehoisted()) {
3683 __ movsxlq(key_reg, key_reg);
3686 Operand operand(BuildFastArrayOperand(
3687 instr->external_pointer(),
3691 instr->additional_index()));
3694 XMMRegister value(ToDoubleRegister(instr->value()));
3695 __ cvtsd2ss(value, value);
3696 __ movss(operand, value);
3698 __ movsd(operand, ToDoubleRegister(instr->value()));
3701 switch (elements_kind) {
3705 __ movb(operand, value);
3709 __ movw(operand, value);
3713 __ movl(operand, value);
3719 case FAST_DOUBLE_ELEMENTS:
3732 void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
3734 LOperand* operand) {
3735 if (value->representation().IsTagged() && !value->type().IsSmi()) {
3737 if (operand->IsRegister()) {
3740 cc = masm()->CheckSmi(ToOperand(operand));
3747 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3748 DeoptIfTaggedButNotSmi(instr->environment(),
3749 instr->hydrogen()->length(),
3751 DeoptIfTaggedButNotSmi(instr->environment(),
3752 instr->hydrogen()->index(),
3754 if (instr->length()->IsRegister()) {
3756 if (!instr->hydrogen()->length()->representation().IsTagged()) {
3757 __ AssertZeroExtended(reg);
3759 if (instr->index()->IsConstantOperand()) {
3760 int constant_index =
3762 if (instr->hydrogen()->length()->representation().IsTagged()) {
3765 __ cmpq(reg, Immediate(constant_index));
3769 if (!instr->hydrogen()->index()->representation().IsTagged()) {
3770 __ AssertZeroExtended(reg2);
3775 Operand length = ToOperand(instr->length());
3776 if (instr->index()->IsConstantOperand()) {
3777 int constant_index =
3779 if (instr->hydrogen()->length()->representation().IsTagged()) {
3782 __ cmpq(length, Immediate(constant_index));
3792 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3794 Register elements = ToRegister(instr->object());
3795 LOperand* key = instr->key();
3796 if (!key->IsConstantOperand()) {
3802 if (instr->hydrogen()->key()->representation().IsTagged()) {
3803 __ SmiToInteger64(key_reg, key_reg);
3804 } else if (instr->hydrogen()->IsDehoisted()) {
3807 __ movsxlq(key_reg, key_reg);
3812 BuildFastArrayOperand(instr->object(),
3816 instr->additional_index());
3818 if (instr->hydrogen()->NeedsWriteBarrier()) {
3819 ASSERT(!instr->key()->IsConstantOperand());
3820 HType type = instr->hydrogen()->value()->type();
3825 __ lea(key_reg, operand);
3826 __ movq(Operand(key_reg, 0), value);
3827 __ RecordWrite(elements,
3834 __ movq(operand, value);
3839 void LCodeGen::DoStoreKeyedFastDoubleElement(
3840 LStoreKeyedFastDoubleElement* instr) {
3841 XMMRegister value = ToDoubleRegister(instr->value());
3842 LOperand* key = instr->key();
3843 if (!key->IsConstantOperand()) {
3849 if (instr->hydrogen()->key()->representation().IsTagged()) {
3850 __ SmiToInteger64(key_reg, key_reg);
3851 } else if (instr->hydrogen()->IsDehoisted()) {
3854 __ movsxlq(key_reg, key_reg);
3858 if (instr->NeedsCanonicalization()) {
3861 __ ucomisd(value, value);
3868 __ bind(&have_value);
3871 Operand double_store_operand = BuildFastArrayOperand(
3876 instr->additional_index());
3878 __ movsd(double_store_operand, value);
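// NOTE (inferred from NeedsCanonicalization and the self-compare above):
// stores into fast double arrays must not write arbitrary NaN bit patterns,
// because the backing store reserves one specific NaN pattern as the hole
// marker. ucomisd(value, value) is the standard "is this NaN" test (it is
// unordered only for NaN); when it is, the elided lines presumably replace
// the value with the canonical NaN before the movsd into the array.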
3881 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3886 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3887 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3888 : isolate()->builtins()->KeyedStoreIC_Initialize();
3889 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3893 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3894 Register object_reg = ToRegister(instr->object());
3895 Register new_map_reg = ToRegister(instr->new_map_temp());
3897 Handle<Map> from_map = instr->original_map();
3898 Handle<Map> to_map = instr->transitioned_map();
3902 Label not_applicable;
3905 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3914 Register fixed_object_reg = ToRegister(instr->temp());
3917 __ movq(fixed_object_reg, object_reg);
3918 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3919 RelocInfo::CODE_TARGET, instr);
3922 Register fixed_object_reg = ToRegister(instr->temp());
3925 __ movq(fixed_object_reg, object_reg);
3926 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3927 RelocInfo::CODE_TARGET, instr);
3931 __ bind(&not_applicable);
3935 void LCodeGen::DoStringAdd(LStringAdd* instr) {
3936 EmitPushTaggedOperand(instr->left());
3937 EmitPushTaggedOperand(instr->right());
3939 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3943 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3944 class DeferredStringCharCodeAt: public LDeferredCode {
3946 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3947 : LDeferredCode(codegen), instr_(instr) { }
3948 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3949 virtual LInstruction* instr() { return instr_; }
3951 LStringCharCodeAt* instr_;
3954 DeferredStringCharCodeAt* deferred =
3955 new(zone()) DeferredStringCharCodeAt(this, instr);
3962 __ bind(deferred->exit());
3966 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3967 Register string = ToRegister(instr->string());
3968 Register result = ToRegister(instr->result());
3975 PushSafepointRegistersScope scope(this);
3980 if (instr->index()->IsConstantOperand()) {
3985 __ Integer32ToSmi(index, index);
3988 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3991 __ StoreToSafepointRegisterSlot(result, rax);
3995 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3996 class DeferredStringCharFromCode: public LDeferredCode {
3998 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3999 : LDeferredCode(codegen), instr_(instr) { }
4000 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
4001 virtual LInstruction* instr() { return instr_; }
4003 LStringCharFromCode* instr_;
4006 DeferredStringCharFromCode* deferred =
4007 new(zone()) DeferredStringCharFromCode(this, instr);
4009 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
4010 Register char_code = ToRegister(instr->char_code());
4011 Register result = ToRegister(instr->result());
4012 ASSERT(!char_code.is(result));
4015 __ j(above, deferred->entry());
4016 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4020 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
4021 __ j(equal, deferred->entry());
4022 __ bind(deferred->exit());
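// NOTE (inferred summary): String.fromCharCode is served from the
// single-character string cache when possible. Codes above the cached range
// jump straight to the deferred path; otherwise the cache entry is loaded
// and, if it is the undefined sentinel (a cache miss), the deferred path is
// taken as well. The deferred code below boxes the char code as a smi and
// calls Runtime::kCharFromCode.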
4026 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
4027 Register char_code = ToRegister(instr->char_code());
4028 Register result = ToRegister(instr->result());
4035 PushSafepointRegistersScope scope(this);
4036 __ Integer32ToSmi(char_code, char_code);
4038 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
4039 __ StoreToSafepointRegisterSlot(result, rax);
4043 void LCodeGen::DoStringLength(LStringLength* instr) {
4044 Register string = ToRegister(instr->string());
4045 Register result = ToRegister(instr->result());
4050 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4051 LOperand* input = instr->value();
4052 ASSERT(input->IsRegister() || input->IsStackSlot());
4053 LOperand* output = instr->result();
4054 ASSERT(output->IsDoubleRegister());
4055 if (input->IsRegister()) {
4056 __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
4058 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
4063 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4064 LOperand* input = instr->value();
4065 LOperand* output = instr->result();
4066 LOperand* temp = instr->temp();
4068 __ LoadUint32(ToDoubleRegister(output),
4070 ToDoubleRegister(temp));
4074 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
4075 LOperand* input = instr->value();
4076 ASSERT(input->IsRegister() && input->Equals(instr->result()));
4079 __ Integer32ToSmi(reg, reg);
4083 void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4084 class DeferredNumberTagU: public LDeferredCode {
4086 DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
4087 : LDeferredCode(codegen), instr_(instr) { }
4088 virtual void Generate() {
4089 codegen()->DoDeferredNumberTagU(instr_);
4091 virtual LInstruction* instr() { return instr_; }
4093 LNumberTagU* instr_;
4096 LOperand* input = instr->value();
4097 ASSERT(input->IsRegister() && input->Equals(instr->result()));
4100 DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
4102 __ j(above, deferred->entry());
4103 __ Integer32ToSmi(reg, reg);
4104 __ bind(deferred->exit());
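// NOTE (inferred summary): tagging an unsigned 32-bit value only works inline
// while it fits in a smi; the j(above, deferred->entry()) guard presumably
// compares against Smi::kMaxValue. Larger values fall through to
// DoDeferredNumberTagU below, which allocates a HeapNumber (inline when
// FLAG_inline_new allows it, otherwise via Runtime::kAllocateHeapNumber).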
4108 void LCodeGen::DoDeferredNumberTagU(LNumberTagU* instr) {
4114 PushSafepointRegistersScope scope(this);
4122 if (FLAG_inline_new) {
4123 __ AllocateHeapNumber(reg, tmp, &slow);
4124 __ jmp(&done, Label::kNear);
4133 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
4135 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
4136 if (!reg.is(rax)) __ movq(reg, rax);
4142 __ StoreToSafepointRegisterSlot(reg, reg);
4146 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4147 class DeferredNumberTagD: public LDeferredCode {
4149 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
4150 : LDeferredCode(codegen), instr_(instr) { }
4151 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
4152 virtual LInstruction* instr() { return instr_; }
4154 LNumberTagD* instr_;
4157 XMMRegister input_reg = ToDoubleRegister(instr->value());
4161 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
4162 if (FLAG_inline_new) {
4163 __ AllocateHeapNumber(reg, tmp, deferred->entry());
4165 __ jmp(deferred->entry());
4167 __ bind(deferred->exit());
4172 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4180 PushSafepointRegistersScope scope(this);
4181 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
4189 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4190 ASSERT(instr->value()->Equals(instr->result()));
4193 __ Integer32ToSmi(input, input);
4197 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4198 ASSERT(instr->value()->Equals(instr->result()));
4200 if (instr->needs_check()) {
4204 __ AssertSmi(input);
4206 __ SmiToInteger32(input, input);
4210 void LCodeGen::EmitNumberUntagD(Register input_reg,
4211 XMMRegister result_reg,
4212 bool deoptimize_on_undefined,
4213 bool deoptimize_on_minus_zero,
4214 LEnvironment* env) {
4215 Label load_smi, done;
4218 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
4222 Heap::kHeapNumberMapRootIndex);
4223 if (deoptimize_on_undefined) {
4227 __ j(equal, &heap_number, Label::kNear);
4229 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
4233 __ xorps(result_reg, result_reg);
4234 __ divsd(result_reg, result_reg);
4235 __ jmp(&done, Label::kNear);
4237 __ bind(&heap_number);
4241 if (deoptimize_on_minus_zero) {
4242 XMMRegister xmm_scratch = xmm0;
4243 __ xorps(xmm_scratch, xmm_scratch);
4244 __ ucomisd(xmm_scratch, result_reg);
4250 __ jmp(&done, Label::kNear);
4260 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
4261 Label done, heap_number;
4262 Register input_reg = ToRegister(instr->value());
4266 Heap::kHeapNumberMapRootIndex);
4268 if (instr->truncating()) {
4269 __ j(equal, &heap_number, Label::kNear);
4272 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
4273 DeoptimizeIf(not_equal, instr->environment());
4274 __ Set(input_reg, 0);
4275 __ jmp(&done, Label::kNear);
4277 __ bind(&heap_number);
4280 __ cvttsd2siq(input_reg, xmm0);
4283 DeoptimizeIf(equal, instr->environment());
4286 DeoptimizeIf(not_equal, instr->environment());
4288 XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
4290 __ cvttsd2si(input_reg, xmm0);
4291 __ cvtlsi2sd(xmm_temp, input_reg);
4292 __ ucomisd(xmm0, xmm_temp);
4293 DeoptimizeIf(not_equal, instr->environment());
4296 __ testl(input_reg, input_reg);
4298 __ movmskpd(input_reg, xmm0);
4299 __ andl(input_reg, Immediate(1));
4300 DeoptimizeIf(not_zero, instr->environment());
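// NOTE (inferred summary): the truncating path converts the heap number with
// cvttsd2siq and treats undefined as 0; the DeoptimizeIf(equal) after the
// conversion presumably checks for the 0x8000000000000000 sentinel that
// cvttsd2siq produces on overflow or NaN. The non-truncating path converts
// with cvttsd2si, converts back with cvtlsi2sd, and deoptimizes when the
// round trip is not exact or when the result would be -0 (the
// movmskpd/andl(1) sign-bit check above).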
4307 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4308 class DeferredTaggedToI: public LDeferredCode {
4310 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
4311 : LDeferredCode(codegen), instr_(instr) { }
4312 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
4313 virtual LInstruction* instr() { return instr_; }
4318 LOperand* input = instr->value();
4319 ASSERT(input->IsRegister());
4320 ASSERT(input->Equals(instr->result()));
4323 DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
4324 __ JumpIfNotSmi(input_reg, deferred->entry());
4325 __ SmiToInteger32(input_reg, input_reg);
4326 __ bind(deferred->exit());
4330 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4331 LOperand* input = instr->value();
4332 ASSERT(input->IsRegister());
4333 LOperand* result = instr->result();
4334 ASSERT(result->IsDoubleRegister());
4337 XMMRegister result_reg = ToDoubleRegister(result);
4339 EmitNumberUntagD(input_reg, result_reg,
4340 instr->hydrogen()->deoptimize_on_undefined(),
4341 instr->hydrogen()->deoptimize_on_minus_zero(),
4342 instr->environment());
4346 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
4347 LOperand* input = instr->value();
4348 ASSERT(input->IsDoubleRegister());
4349 LOperand* result = instr->result();
4350 ASSERT(result->IsRegister());
4352 XMMRegister input_reg = ToDoubleRegister(input);
4355 if (instr->truncating()) {
4358 __ cvttsd2siq(result_reg, input_reg);
4361 DeoptimizeIf(equal, instr->environment());
4363 __ cvttsd2si(result_reg, input_reg);
4364 __ cvtlsi2sd(xmm0, result_reg);
4365 __ ucomisd(xmm0, input_reg);
4366 DeoptimizeIf(not_equal, instr->environment());
4372 __ testl(result_reg, result_reg);
4374 __ movmskpd(result_reg, input_reg);
4378 __ andl(result_reg, Immediate(1));
4379 DeoptimizeIf(not_zero, instr->environment());
4386 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4387 LOperand* input = instr->value();
4393 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4394 LOperand* input = instr->value();
4396 DeoptimizeIf(cc, instr->environment());
4400 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4405 if (instr->hydrogen()->is_interval_check()) {
4408 instr->hydrogen()->GetCheckInterval(&first, &last);
4411 Immediate(static_cast<int8_t>(first)));
4414 if (first == last) {
4415 DeoptimizeIf(not_equal, instr->environment());
4417 DeoptimizeIf(below, instr->environment());
4421 Immediate(static_cast<int8_t>(last)));
4422 DeoptimizeIf(above, instr->environment());
4428 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
4434 DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment());
4440 DeoptimizeIf(not_equal, instr->environment());
4446 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4448 Handle<JSFunction> target = instr->hydrogen()->target();
4449 if (isolate()->heap()->InNewSpace(*target)) {
4450 Handle<JSGlobalPropertyCell> cell =
4451 isolate()->factory()->NewJSGlobalPropertyCell(target);
4455 __ Cmp(reg, target);
4457 DeoptimizeIf(not_equal, instr->environment());
4461 void LCodeGen::DoCheckMapCommon(Register reg,
4464 LEnvironment* env) {
4466 __ CompareMap(reg, map, &success, mode);
4472 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4473 LOperand* input = instr->value();
4474 ASSERT(input->IsRegister());
4478 SmallMapList* map_set = instr->hydrogen()->map_set();
4479 for (int i = 0; i < map_set->length() - 1; i++) {
4480 Handle<Map> map = map_set->at(i);
4484 Handle<Map> map = map_set->last();
4490 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4491 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4492 Register result_reg = ToRegister(instr->result());
4493 __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
4497 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4498 ASSERT(instr->unclamped()->Equals(instr->result()));
4499 Register value_reg = ToRegister(instr->result());
4500 __ ClampUint8(value_reg);
4504 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4505 ASSERT(instr->unclamped()->Equals(instr->result()));
4506 Register input_reg = ToRegister(instr->unclamped());
4507 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
4508 Label is_smi, done, heap_number;
4510 __ JumpIfSmi(input_reg, &is_smi);
4514 factory()->heap_number_map());
4515 __ j(equal, &heap_number, Label::kNear);
4519 __ Cmp(input_reg, factory()->undefined_value());
4520 DeoptimizeIf(not_equal, instr->environment());
4521 __ movq(input_reg, Immediate(0));
4522 __ jmp(&done, Label::kNear);
4525 __ bind(&heap_number);
4527 __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg);
4528 __ jmp(&done, Label::kNear);
4532 __ SmiToInteger32(input_reg, input_reg);
4533 __ ClampUint8(input_reg);
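// NOTE (inferred summary): a tagged value is clamped to [0, 255] by
// dispatching on its type: smis are untagged and clamped with ClampUint8,
// heap numbers go through ClampDoubleToUint8, undefined becomes 0, and any
// other heap object deoptimizes.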
4539 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4542 Handle<JSObject> holder = instr->holder();
4543 Handle<JSObject> current_prototype = instr->prototype();
4546 __ LoadHeapObject(reg, current_prototype);
4549 while (!current_prototype.is_identical_to(holder)) {
4550 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4553 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4555 __ LoadHeapObject(reg, current_prototype);
4559 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4564 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4565 class DeferredAllocateObject: public LDeferredCode {
4567 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4568 : LDeferredCode(codegen), instr_(instr) { }
4569 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4570 virtual LInstruction* instr() { return instr_; }
4572 LAllocateObject* instr_;
4575 DeferredAllocateObject* deferred =
4576 new(zone()) DeferredAllocateObject(this, instr);
4578 Register result = ToRegister(instr->result());
4579 Register scratch = ToRegister(instr->temp());
4580 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4581 Handle<Map> initial_map(constructor->initial_map());
4582 int instance_size = initial_map->instance_size();
4583 ASSERT(initial_map->pre_allocated_property_fields() +
4584 initial_map->unused_property_fields() -
4585 initial_map->inobject_properties() == 0);
4590 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
4591 __ AllocateInNewSpace(instance_size,
4598 __ bind(deferred->exit());
4599 if (FLAG_debug_code) {
4600 Label is_in_new_space;
4601 __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
4602 __ Abort("Allocated object is not in new-space");
4603 __ bind(&is_in_new_space);
4607 Register map = scratch;
4608 __ LoadHeapObject(scratch, constructor);
4611 if (FLAG_debug_code) {
4612 __ AssertNotSmi(map);
4615 __ Assert(equal, "Unexpected instance size");
4617 Immediate(initial_map->pre_allocated_property_fields()));
4618 __ Assert(equal, "Unexpected pre-allocated property fields count");
4620 Immediate(initial_map->unused_property_fields()));
4621 __ Assert(equal, "Unexpected unused property fields count");
4623 Immediate(initial_map->inobject_properties()));
4624 __ Assert(equal, "Unexpected in-object property fields count");
4630 __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
4633 if (initial_map->inobject_properties() != 0) {
4634 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4635 for (int i = 0; i < initial_map->inobject_properties(); i++) {
4643 void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4644 Register result = ToRegister(instr->result());
4645 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4646 Handle<Map> initial_map(constructor->initial_map());
4647 int instance_size = initial_map->instance_size();
4654 PushSafepointRegistersScope scope(this);
4656 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
4657 __ StoreToSafepointRegisterSlot(result, rax);
4661 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4662 Handle<FixedArray> literals(instr->environment()->closure()->literals());
4664 instr->hydrogen()->boilerplate_elements_kind();
4670 boilerplate_elements_kind, true)) {
4671 __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
4677 __ cmpb(rbx, Immediate(boilerplate_elements_kind <<
4679 DeoptimizeIf(not_equal, instr->environment());
4683 __ PushHeapObject(literals);
4687 __ Push(isolate()->factory()->empty_fixed_array());
4690 int length = instr->hydrogen()->length();
4691 if (instr->hydrogen()->IsCopyOnWrite()) {
4692 ASSERT(instr->hydrogen()->depth() == 1);
4695 FastCloneShallowArrayStub stub(mode, length);
4696 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4697 } else if (instr->hydrogen()->depth() > 1) {
4698 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4700 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4703 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
4706 FastCloneShallowArrayStub stub(mode, length);
4707 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4712 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
4720 Handle<FixedArrayBase> elements(object->elements());
4721 bool has_elements = elements->length() > 0 &&
4722 elements->map() != isolate()->heap()->fixed_cow_array_map();
4726 int object_offset = *offset;
4727 int object_size = object->map()->instance_size();
4728 int elements_offset = *offset + object_size;
4729 int elements_size = has_elements ? elements->Size() : 0;
4730 *offset += object_size + elements_size;
4733 ASSERT(object->properties()->length() == 0);
4734 int inobject_properties = object->map()->inobject_properties();
4735 int header_size = object_size - inobject_properties * kPointerSize;
4738 __ lea(rcx, Operand(result, elements_offset));
4746 for (int i = 0; i < inobject_properties; i++) {
4747 int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
4748 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
4749 if (value->IsJSObject()) {
4751 __ lea(rcx, Operand(result, *offset));
4753 __ LoadHeapObject(source, value_object);
4754 EmitDeepCopy(value_object, result, source, offset);
4755 } else if (value->IsHeapObject()) {
4756 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
4766 __ LoadHeapObject(source, elements);
4773 int elements_length = elements->length();
4774 if (elements->IsFixedDoubleArray()) {
4775 Handle<FixedDoubleArray> double_array =
4777 for (int i = 0; i < elements_length; i++) {
4778 int64_t value = double_array->get_representation(i);
4784 } else if (elements->IsFixedArray()) {
4786 for (int i = 0; i < elements_length; i++) {
4788 Handle<Object> value(fast_elements->get(i));
4789 if (value->IsJSObject()) {
4791 __ lea(rcx, Operand(result, *offset));
4793 __ LoadHeapObject(source, value_object);
4794 EmitDeepCopy(value_object, result, source, offset);
4795 } else if (value->IsHeapObject()) {
4796 __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
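// NOTE (inferred summary): EmitDeepCopy flattens a boilerplate object into a
// single pre-allocated chunk. It copies the header and in-object fields at
// *offset, places the elements backing store immediately after them, and
// recurses into nested JSObjects by pointing them at the next free position
// in the same chunk; *offset is advanced past everything that was emitted so
// the caller knows the total size consumed.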
4810 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4811 int size = instr->hydrogen()->total_size();
4813 instr->hydrogen()->boilerplate()->GetElementsKind();
4819 boilerplate_elements_kind, true)) {
4820 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
4826 __ cmpb(rcx, Immediate(boilerplate_elements_kind <<
4828 DeoptimizeIf(not_equal, instr->environment());
4833 Label allocated, runtime_allocate;
4837 __ bind(&runtime_allocate);
4839 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4841 __ bind(&allocated);
4843 __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
4844 EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
4849 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4850 Handle<FixedArray> literals(instr->environment()->closure()->literals());
4851 Handle<FixedArray> constant_properties =
4852 instr->hydrogen()->constant_properties();
4855 __ PushHeapObject(literals);
4857 __ Push(constant_properties);
4858 int flags = instr->hydrogen()->fast_elements()
4861 flags |= instr->hydrogen()->has_function()
4867 int properties_count = constant_properties->length() / 2;
4868 if (instr->hydrogen()->depth() > 1) {
4869 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
4872 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
4874 FastCloneShallowObjectStub stub(properties_count);
4875 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4880 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4883 CallRuntime(Runtime::kToFastProperties, 1, instr);
4887 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4893 int literal_offset =
4895 __ LoadHeapObject(rcx, instr->hydrogen()->literals());
4897 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
4904 __ Push(instr->hydrogen()->pattern());
4905 __ Push(instr->hydrogen()->flags());
4906 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
4909 __ bind(&materialized);
4911 Label allocated, runtime_allocate;
4915 __ bind(&runtime_allocate);
4918 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4921 __ bind(&allocated);
4930 if ((size % (2 * kPointerSize)) != 0) {
4937 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4940 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
4941 bool pretenure = instr->hydrogen()->pretenure();
4942 if (!pretenure && shared_info->num_literals() == 0) {
4943 FastNewClosureStub stub(shared_info->language_mode());
4944 __ Push(shared_info);
4945 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4948 __ Push(shared_info);
4949 __ PushRoot(pretenure ?
4950 Heap::kTrueValueRootIndex :
4951 Heap::kFalseValueRootIndex);
4952 CallRuntime(Runtime::kNewClosure, 3, instr);
4957 void LCodeGen::DoTypeof(LTypeof* instr) {
4958 LOperand* input = instr->value();
4959 EmitPushTaggedOperand(input);
4960 CallRuntime(Runtime::kTypeof, 1, instr);
4964 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
4965 ASSERT(!operand->IsDoubleRegister());
4966 if (operand->IsConstantOperand()) {
4968 if (object->IsSmi()) {
4969 __ Push(Handle<Smi>::cast(object));
4971 __ PushHeapObject(Handle<HeapObject>::cast(object));
4973 } else if (operand->IsRegister()) {
4976 __ push(ToOperand(operand));
4981 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4983 int true_block = chunk_->LookupDestination(instr->true_block_id());
4984 int false_block = chunk_->LookupDestination(instr->false_block_id());
4985 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4986 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4989 EmitTypeofIs(true_label, false_label, input, instr->type_literal());
4991 EmitBranch(true_block, false_block, final_branch_condition);
4996 Condition LCodeGen::EmitTypeofIs(Label* true_label,
4999 Handle<String> type_name) {
5001 if (type_name->Equals(heap()->number_symbol())) {
5002 __ JumpIfSmi(input, true_label);
5004 Heap::kHeapNumberMapRootIndex);
5006 final_branch_condition = equal;
5008 } else if (type_name->Equals(heap()->string_symbol())) {
5009 __ JumpIfSmi(input, false_label);
5014 final_branch_condition = zero;
5016 } else if (type_name->Equals(heap()->boolean_symbol())) {
5017 __ CompareRoot(input, Heap::kTrueValueRootIndex);
5019 __ CompareRoot(input, Heap::kFalseValueRootIndex);
5020 final_branch_condition = equal;
5022 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
5023 __ CompareRoot(input, Heap::kNullValueRootIndex);
5024 final_branch_condition = equal;
5026 } else if (type_name->Equals(heap()->undefined_symbol())) {
5027 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
5029 __ JumpIfSmi(input, false_label);
5036 } else if (type_name->Equals(heap()->function_symbol())) {
5038 __ JumpIfSmi(input, false_label);
5042 final_branch_condition = equal;
5044 } else if (type_name->Equals(heap()->object_symbol())) {
5045 __ JumpIfSmi(input, false_label);
5046 if (!FLAG_harmony_typeof) {
5047 __ CompareRoot(input, Heap::kNullValueRootIndex);
5057 final_branch_condition = zero;
5060 __ jmp(false_label);
5063 return final_branch_condition;
5067 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
5069 int true_block = chunk_->LookupDestination(instr->true_block_id());
5070 int false_block = chunk_->LookupDestination(instr->false_block_id());
5072 EmitIsConstructCall(temp);
5073 EmitBranch(true_block, false_block, equal);
5077 void LCodeGen::EmitIsConstructCall(Register temp) {
5082 Label check_frame_marker;
5085 __ j(not_equal, &check_frame_marker, Label::kNear);
5089 __ bind(&check_frame_marker);
5095 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5098 int current_pc = masm()->pc_offset();
5099 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5100 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5101 __ Nop(padding_size);
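// NOTE (inferred summary): lazy deoptimization later patches a call over the
// code that follows a safepoint, so consecutive lazy-deopt points must be at
// least the patch size apart. EnsureSpaceForLazyDeopt pads with Nops whenever
// the distance from the previous lazy-deopt point is too small.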
5106 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5108 last_lazy_deopt_pc_ = masm()->pc_offset();
5109 ASSERT(instr->HasEnvironment());
5110 LEnvironment* env = instr->environment();
5111 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5112 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5116 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5121 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
5122 LOperand* obj = instr->object();
5123 LOperand* key = instr->key();
5124 EmitPushTaggedOperand(obj);
5125 EmitPushTaggedOperand(key);
5126 ASSERT(instr->HasPointerMap());
5127 LPointerMap* pointers = instr->pointer_map();
5128 RecordPosition(pointers->position());
5132 SafepointGenerator safepoint_generator(
5133 this, pointers, Safepoint::kLazyDeopt);
5135 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
5139 void LCodeGen::DoIn(LIn* instr) {
5140 LOperand* obj = instr->object();
5141 LOperand* key = instr->key();
5142 EmitPushTaggedOperand(key);
5143 EmitPushTaggedOperand(obj);
5144 ASSERT(instr->HasPointerMap());
5145 LPointerMap* pointers = instr->pointer_map();
5146 RecordPosition(pointers->position());
5147 SafepointGenerator safepoint_generator(
5148 this, pointers, Safepoint::kLazyDeopt);
5153 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5154 PushSafepointRegistersScope scope(this);
5156 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5157 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
5158 ASSERT(instr->HasEnvironment());
5159 LEnvironment* env = instr->environment();
5160 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5164 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5165 class DeferredStackCheck: public LDeferredCode {
5167 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5168 : LDeferredCode(codegen), instr_(instr) { }
5169 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
5170 virtual LInstruction* instr() { return instr_; }
5172 LStackCheck* instr_;
5175 ASSERT(instr->HasEnvironment());
5176 LEnvironment* env = instr->environment();
5179 if (instr->hydrogen()->is_function_entry()) {
5182 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
5184 StackCheckStub stub;
5185 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5187 last_lazy_deopt_pc_ = masm()->pc_offset();
5189 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5190 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5192 ASSERT(instr->hydrogen()->is_backwards_branch());
5194 DeferredStackCheck* deferred_stack_check =
5195 new(zone()) DeferredStackCheck(this, instr);
5196 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
5197 __ j(below, deferred_stack_check->entry());
5199 last_lazy_deopt_pc_ = masm()->pc_offset();
5200 __ bind(instr->done_label());
5201 deferred_stack_check->SetExit(instr->done_label());
5202 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5210 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5214 LEnvironment* environment = instr->environment();
5215 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
5216 instr->SpilledDoubleRegisterArray());
5220 ASSERT(!environment->HasBeenRegistered());
5221 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5222 ASSERT(osr_pc_offset_ == -1);
5223 osr_pc_offset_ = masm()->pc_offset();
5227 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5228 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
5229 DeoptimizeIf(equal, instr->environment());
5231 Register null_value = rdi;
5232 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5233 __ cmpq(rax, null_value);
5234 DeoptimizeIf(equal, instr->environment());
5237 DeoptimizeIf(cc, instr->environment());
5243 Label use_cache, call_runtime;
5244 __ CheckEnumCache(null_value, &call_runtime);
5247 __ jmp(&use_cache, Label::kNear);
5250 __ bind(&call_runtime);
5252 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5255 Heap::kMetaMapRootIndex);
5256 DeoptimizeIf(not_equal, instr->environment());
5257 __ bind(&use_cache);
5261 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5263 Register result = ToRegister(instr->result());
5264 Label load_cache, done;
5265 __ EnumLength(result, map);
5268 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5270 __ bind(&load_cache);
5271 __ LoadInstanceDescriptors(map, result);
5277 Condition cc = masm()->CheckSmi(result);
5278 DeoptimizeIf(cc, instr->environment());
5282 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5283 Register object = ToRegister(instr->value());
5286 DeoptimizeIf(not_equal, instr->environment());
5290 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5291 Register object = ToRegister(instr->object());
5294 Label out_of_object, done;
5295 __ SmiToInteger32(index, index);
5296 __ cmpl(index, Immediate(0));
5297 __ j(less, &out_of_object);
5302 __ jmp(&done, Label::kNear);
5304 __ bind(&out_of_object);
5311 FixedArray::kHeaderSize - kPointerSize));
5320 #endif // V8_TARGET_ARCH_X64