30 #if defined(V8_TARGET_ARCH_X64)
42 class SafepointGenerator :
public CallWrapper {
45 LPointerMap* pointers,
46 Safepoint::DeoptMode mode)
57 codegen_->RecordSafepoint(pointers_, deopt_mode_);
62 LPointerMap* pointers_;
63 Safepoint::DeoptMode deopt_mode_;
69 bool LCodeGen::GenerateCode() {
70 HPhase phase(
"Z_Code generation", chunk());
77 FrameScope frame_scope(masm_, StackFrame::MANUAL);
79 return GeneratePrologue() &&
81 GenerateDeferredCode() &&
82 GenerateJumpTable() &&
83 GenerateSafepointTable();
87 void LCodeGen::FinishCode(Handle<Code> code) {
89 code->set_stack_slots(GetStackSlotCount());
90 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
91 PopulateDeoptimizationData(code);
95 void LCodeGen::Abort(
const char* format, ...) {
96 if (FLAG_trace_bailout) {
97 SmartArrayPointer<char>
name(
98 info()->shared_info()->DebugName()->
ToCString());
99 PrintF(
"Aborting LCodeGen in @\"%s\": ", *
name);
101 va_start(arguments, format);
110 void LCodeGen::Comment(
const char* format, ...) {
111 if (!FLAG_code_comments)
return;
113 StringBuilder builder(buffer,
ARRAY_SIZE(buffer));
115 va_start(arguments, format);
116 builder.AddFormattedList(format, arguments);
121 int length = builder.position();
123 memcpy(copy.start(), builder.Finalize(), copy.length());
124 masm()->RecordComment(copy.start());
128 bool LCodeGen::GeneratePrologue() {
132 if (strlen(FLAG_stop_at) > 0 &&
133 info_->function()->name()->IsEqualTo(
CStrVector(FLAG_stop_at))) {
142 if (!info_->is_classic_mode() || info_->is_native()) {
145 __ j(
zero, &ok, Label::kNear);
147 int receiver_offset = (scope()->num_parameters() + 1) *
kPointerSize;
159 int slots = GetStackSlotCount();
161 if (FLAG_debug_code) {
175 const int kPageSize = 4 *
KB;
178 offset -= kPageSize) {
187 if (heap_slots > 0) {
188 Comment(
";;; Allocate local context");
192 FastNewContextStub stub(heap_slots);
195 __ CallRuntime(Runtime::kNewFunctionContext, 1);
197 RecordSafepoint(Safepoint::kNoLazyDeopt);
203 int num_parameters = scope()->num_parameters();
204 for (
int i = 0; i < num_parameters; i++) {
205 Variable* var = scope()->parameter(i);
206 if (var->IsContextSlot()) {
210 __ movq(
rax, Operand(
rbp, parameter_offset));
213 __ movq(Operand(
rsi, context_offset),
rax);
218 Comment(
";;; End allocate local context");
223 __ CallRuntime(Runtime::kTraceEnter, 0);
225 return !is_aborted();
229 bool LCodeGen::GenerateBody() {
231 bool emit_instructions =
true;
232 for (current_instruction_ = 0;
233 !is_aborted() && current_instruction_ < instructions_->length();
234 current_instruction_++) {
235 LInstruction* instr = instructions_->at(current_instruction_);
236 if (instr->IsLabel()) {
238 emit_instructions = !label->HasReplacement();
241 if (emit_instructions) {
242 Comment(
";;; @%d: %s.", current_instruction_, instr->Mnemonic());
243 instr->CompileToNative(
this);
247 return !is_aborted();
251 bool LCodeGen::GenerateJumpTable() {
252 for (
int i = 0; i < jump_table_.length(); i++) {
253 __ bind(&jump_table_[i].label);
256 return !is_aborted();
260 bool LCodeGen::GenerateDeferredCode() {
262 if (deferred_.length() > 0) {
263 for (
int i = 0; !is_aborted() && i < deferred_.length(); i++) {
264 LDeferredCode* code = deferred_[i];
265 __ bind(code->entry());
266 Comment(
";;; Deferred code @%d: %s.",
267 code->instruction_index(),
268 code->instr()->Mnemonic());
270 __ jmp(code->exit());
276 if (!is_aborted()) status_ =
DONE;
277 return !is_aborted();
281 bool LCodeGen::GenerateSafepointTable() {
283 safepoints_.Emit(masm(), GetStackSlotCount());
284 return !is_aborted();
293 XMMRegister LCodeGen::ToDoubleRegister(
int index)
const {
304 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op)
const {
305 ASSERT(op->IsDoubleRegister());
306 return ToDoubleRegister(op->index());
310 bool LCodeGen::IsInteger32Constant(LConstantOperand* op)
const {
311 return op->IsConstantOperand() &&
312 chunk_->LookupLiteralRepresentation(op).IsInteger32();
316 bool LCodeGen::IsTaggedConstant(LConstantOperand* op)
const {
317 return op->IsConstantOperand() &&
318 chunk_->LookupLiteralRepresentation(op).IsTagged();
322 int LCodeGen::ToInteger32(LConstantOperand* op)
const {
323 Handle<Object> value = chunk_->LookupLiteral(op);
324 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
325 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
327 return static_cast<int32_t>(value->Number());
331 double LCodeGen::ToDouble(LConstantOperand* op)
const {
332 Handle<Object> value = chunk_->LookupLiteral(op);
333 return value->Number();
337 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op)
const {
338 Handle<Object> literal = chunk_->LookupLiteral(op);
339 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
344 Operand LCodeGen::ToOperand(LOperand* op)
const {
347 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
348 int index = op->index();
360 void LCodeGen::WriteTranslation(LEnvironment* environment,
361 Translation* translation) {
362 if (environment ==
NULL)
return;
365 int translation_size = environment->values()->length();
367 int height = translation_size - environment->parameter_count();
369 WriteTranslation(environment->outer(), translation);
370 int closure_id = DefineDeoptimizationLiteral(environment->closure());
371 switch (environment->frame_type()) {
373 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
376 translation->BeginConstructStubFrame(closure_id, translation_size);
379 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
384 for (
int i = 0; i < translation_size; ++i) {
385 LOperand* value = environment->values()->at(i);
388 if (environment->spilled_registers() !=
NULL && value !=
NULL) {
389 if (value->IsRegister() &&
390 environment->spilled_registers()[value->index()] !=
NULL) {
391 translation->MarkDuplicate();
392 AddToTranslation(translation,
393 environment->spilled_registers()[value->index()],
394 environment->HasTaggedValueAt(i));
396 value->IsDoubleRegister() &&
397 environment->spilled_double_registers()[value->index()] !=
NULL) {
398 translation->MarkDuplicate();
401 environment->spilled_double_registers()[value->index()],
406 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
411 void LCodeGen::AddToTranslation(Translation* translation,
418 translation->StoreArgumentsObject();
419 }
else if (op->IsStackSlot()) {
421 translation->StoreStackSlot(op->index());
423 translation->StoreInt32StackSlot(op->index());
425 }
else if (op->IsDoubleStackSlot()) {
426 translation->StoreDoubleStackSlot(op->index());
427 }
else if (op->IsArgument()) {
429 int src_index = GetStackSlotCount() + op->index();
430 translation->StoreStackSlot(src_index);
431 }
else if (op->IsRegister()) {
434 translation->StoreRegister(reg);
436 translation->StoreInt32Register(reg);
438 }
else if (op->IsDoubleRegister()) {
439 XMMRegister reg = ToDoubleRegister(op);
440 translation->StoreDoubleRegister(reg);
441 }
else if (op->IsConstantOperand()) {
443 int src_index = DefineDeoptimizationLiteral(literal);
444 translation->StoreLiteral(src_index);
451 void LCodeGen::CallCodeGeneric(Handle<Code> code,
452 RelocInfo::Mode mode,
454 SafepointMode safepoint_mode,
458 LPointerMap* pointers = instr->pointer_map();
459 RecordPosition(pointers->position());
461 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
472 void LCodeGen::CallCode(Handle<Code> code,
473 RelocInfo::Mode mode,
474 LInstruction* instr) {
475 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
479 void LCodeGen::CallRuntime(
const Runtime::Function*
function,
481 LInstruction* instr) {
483 ASSERT(instr->HasPointerMap());
484 LPointerMap* pointers = instr->pointer_map();
485 RecordPosition(pointers->position());
487 __ CallRuntime(
function, num_arguments);
488 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
494 LInstruction* instr) {
496 __ CallRuntimeSaveDoubles(
id);
497 RecordSafepointWithRegisters(
498 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
502 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
503 Safepoint::DeoptMode mode) {
504 if (!environment->HasBeenRegistered()) {
519 int jsframe_count = 0;
520 for (LEnvironment* e = environment; e !=
NULL; e = e->outer()) {
526 Translation translation(&translations_, frame_count, jsframe_count,
527 environment->zone());
528 WriteTranslation(environment, &translation);
529 int deoptimization_index = deoptimizations_.length();
530 int pc_offset = masm()->pc_offset();
531 environment->Register(deoptimization_index,
533 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
534 deoptimizations_.Add(environment, environment->zone());
539 void LCodeGen::DeoptimizeIf(
Condition cc, LEnvironment* environment) {
540 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
541 ASSERT(environment->HasBeenRegistered());
542 int id = environment->deoptimization_index();
545 Abort(
"bailout was not prepared");
554 if (jump_table_.is_empty() ||
555 jump_table_.last().address != entry) {
556 jump_table_.Add(JumpTableEntry(entry), zone());
558 __ j(cc, &jump_table_.last().label);
563 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
564 int length = deoptimizations_.length();
565 if (length == 0)
return;
566 Handle<DeoptimizationInputData> data =
567 factory()->NewDeoptimizationInputData(length,
TENURED);
569 Handle<ByteArray> translations = translations_.CreateByteArray();
570 data->SetTranslationByteArray(*translations);
571 data->SetInlinedFunctionCount(
Smi::FromInt(inlined_function_count_));
573 Handle<FixedArray> literals =
574 factory()->NewFixedArray(deoptimization_literals_.length(),
TENURED);
575 for (
int i = 0; i < deoptimization_literals_.length(); i++) {
576 literals->set(i, *deoptimization_literals_[i]);
578 data->SetLiteralArray(*literals);
584 for (
int i = 0; i < length; i++) {
585 LEnvironment* env = deoptimizations_[i];
587 data->SetTranslationIndex(i,
Smi::FromInt(env->translation_index()));
588 data->SetArgumentsStackHeight(i,
592 code->set_deoptimization_data(*data);
596 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
597 int result = deoptimization_literals_.length();
598 for (
int i = 0; i < deoptimization_literals_.length(); ++i) {
599 if (deoptimization_literals_[i].is_identical_to(literal))
return i;
601 deoptimization_literals_.Add(literal, zone());
606 void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
607 ASSERT(deoptimization_literals_.length() == 0);
609 const ZoneList<Handle<JSFunction> >* inlined_closures =
610 chunk()->inlined_closures();
612 for (
int i = 0, length = inlined_closures->length();
615 DefineDeoptimizationLiteral(inlined_closures->at(i));
618 inlined_function_count_ = deoptimization_literals_.length();
622 void LCodeGen::RecordSafepointWithLazyDeopt(
623 LInstruction* instr, SafepointMode safepoint_mode,
int argc) {
624 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
625 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
627 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
628 RecordSafepointWithRegisters(
629 instr->pointer_map(), argc, Safepoint::kLazyDeopt);
634 void LCodeGen::RecordSafepoint(
635 LPointerMap* pointers,
636 Safepoint::Kind kind,
638 Safepoint::DeoptMode deopt_mode) {
639 ASSERT(kind == expected_safepoint_kind_);
641 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
643 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
644 kind, arguments, deopt_mode);
645 for (
int i = 0; i < operands->length(); i++) {
646 LOperand* pointer = operands->at(i);
647 if (pointer->IsStackSlot()) {
648 safepoint.DefinePointerSlot(pointer->index(), zone());
649 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
650 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
653 if (kind & Safepoint::kWithRegisters) {
655 safepoint.DefinePointerRegister(
rsi, zone());
660 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
661 Safepoint::DeoptMode deopt_mode) {
662 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
666 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
667 LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
668 RecordSafepoint(&empty_pointers, deopt_mode);
672 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
674 Safepoint::DeoptMode deopt_mode) {
675 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
679 void LCodeGen::RecordPosition(
int position) {
680 if (position == RelocInfo::kNoPosition)
return;
681 masm()->positions_recorder()->RecordPosition(position);
685 void LCodeGen::DoLabel(LLabel* label) {
686 if (label->is_loop_header()) {
687 Comment(
";;; B%d - LOOP entry", label->block_id());
689 Comment(
";;; B%d", label->block_id());
691 __ bind(label->label());
692 current_block_ = label->block_id();
697 void LCodeGen::DoParallelMove(LParallelMove* move) {
698 resolver_.Resolve(move);
702 void LCodeGen::DoGap(LGap* gap) {
707 LParallelMove* move = gap->GetParallelMove(inner_pos);
708 if (move !=
NULL) DoParallelMove(move);
713 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
718 void LCodeGen::DoParameter(LParameter* instr) {
723 void LCodeGen::DoCallStub(LCallStub* instr) {
725 switch (instr->hydrogen()->major_key()) {
726 case CodeStub::RegExpConstructResult: {
727 RegExpConstructResultStub stub;
728 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
731 case CodeStub::RegExpExec: {
733 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
738 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
741 case CodeStub::NumberToString: {
742 NumberToStringStub stub;
743 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
746 case CodeStub::StringAdd: {
748 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
751 case CodeStub::StringCompare: {
752 StringCompareStub stub;
753 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
756 case CodeStub::TranscendentalCache: {
757 TranscendentalCacheStub stub(instr->transcendental_type(),
759 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
768 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
773 void LCodeGen::DoModI(LModI* instr) {
774 if (instr->hydrogen()->HasPowerOf2Divisor()) {
775 Register dividend =
ToRegister(instr->InputAt(0));
780 if (divisor < 0) divisor = -divisor;
782 Label positive_dividend, done;
783 __ testl(dividend, dividend);
784 __ j(
not_sign, &positive_dividend, Label::kNear);
786 __ andl(dividend, Immediate(divisor - 1));
792 __ jmp(&done, Label::kNear);
794 __ bind(&positive_dividend);
795 __ andl(dividend, Immediate(divisor - 1));
798 Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
799 Register left_reg =
ToRegister(instr->InputAt(0));
800 Register right_reg =
ToRegister(instr->InputAt(1));
801 Register result_reg =
ToRegister(instr->result());
810 __ testl(right_reg, right_reg);
811 DeoptimizeIf(
zero, instr->environment());
814 __ testl(left_reg, left_reg);
815 __ j(
zero, &remainder_eq_dividend, Label::kNear);
816 __ j(
sign, &slow, Label::kNear);
818 __ testl(right_reg, right_reg);
823 __ bind(&both_positive);
826 __ cmpl(left_reg, right_reg);
827 __ j(
less, &remainder_eq_dividend, Label::kNear);
830 Register scratch =
ToRegister(instr->TempAt(0));
831 __ movl(scratch, right_reg);
832 __ subl(scratch, Immediate(1));
833 __ testl(scratch, right_reg);
834 __ j(
not_zero, &do_subtraction, Label::kNear);
835 __ andl(left_reg, scratch);
836 __ jmp(&remainder_eq_dividend, Label::kNear);
838 __ bind(&do_subtraction);
839 const int kUnfolds = 3;
841 __ movl(scratch, left_reg);
842 for (
int i = 0; i < kUnfolds; i++) {
844 __ subl(left_reg, right_reg);
846 __ cmpl(left_reg, right_reg);
847 __ j(
less, &remainder_eq_dividend, Label::kNear);
849 __ movl(left_reg, scratch);
861 __ testl(left_reg, left_reg);
866 __ testl(result_reg, result_reg);
870 __ bind(&positive_left);
876 __ jmp(&done, Label::kNear);
878 __ bind(&remainder_eq_dividend);
879 __ movl(result_reg, left_reg);
886 void LCodeGen::DoDivI(LDivI* instr) {
887 LOperand* right = instr->InputAt(1);
893 Register left_reg =
rax;
898 __ testl(right_reg, right_reg);
899 DeoptimizeIf(
zero, instr->environment());
905 __ testl(left_reg, left_reg);
907 __ testl(right_reg, right_reg);
908 DeoptimizeIf(
sign, instr->environment());
909 __ bind(&left_not_zero);
914 Label left_not_min_int;
916 __ j(
not_zero, &left_not_min_int, Label::kNear);
917 __ cmpl(right_reg, Immediate(-1));
918 DeoptimizeIf(
zero, instr->environment());
919 __ bind(&left_not_min_int);
928 DeoptimizeIf(
not_zero, instr->environment());
932 void LCodeGen::DoMulI(LMulI* instr) {
933 Register left =
ToRegister(instr->InputAt(0));
934 LOperand* right = instr->InputAt(1);
942 if (right->IsConstantOperand()) {
944 if (right_value == -1) {
946 }
else if (right_value == 0) {
948 }
else if (right_value == 2) {
950 }
else if (!can_overflow) {
954 switch (right_value) {
959 __ leal(left, Operand(left, left,
times_2, 0));
962 __ shll(left, Immediate(2));
965 __ leal(left, Operand(left, left,
times_4, 0));
968 __ shll(left, Immediate(3));
971 __ leal(left, Operand(left, left,
times_8, 0));
974 __ shll(left, Immediate(4));
977 __ imull(left, left, Immediate(right_value));
981 __ imull(left, left, Immediate(right_value));
983 }
else if (right->IsStackSlot()) {
984 __ imull(left, ToOperand(right));
990 DeoptimizeIf(
overflow, instr->environment());
996 __ testl(left, left);
998 if (right->IsConstantOperand()) {
1002 }
else if (right->IsStackSlot()) {
1004 DeoptimizeIf(
sign, instr->environment());
1008 DeoptimizeIf(
sign, instr->environment());
1015 void LCodeGen::DoBitI(LBitI* instr) {
1016 LOperand* left = instr->InputAt(0);
1017 LOperand* right = instr->InputAt(1);
1018 ASSERT(left->Equals(instr->result()));
1019 ASSERT(left->IsRegister());
1021 if (right->IsConstantOperand()) {
1023 switch (instr->op()) {
1024 case Token::BIT_AND:
1030 case Token::BIT_XOR:
1037 }
else if (right->IsStackSlot()) {
1038 switch (instr->op()) {
1039 case Token::BIT_AND:
1045 case Token::BIT_XOR:
1053 ASSERT(right->IsRegister());
1054 switch (instr->op()) {
1055 case Token::BIT_AND:
1061 case Token::BIT_XOR:
1072 void LCodeGen::DoShiftI(LShiftI* instr) {
1073 LOperand* left = instr->InputAt(0);
1074 LOperand* right = instr->InputAt(1);
1075 ASSERT(left->Equals(instr->result()));
1076 ASSERT(left->IsRegister());
1077 if (right->IsRegister()) {
1080 switch (instr->op()) {
1086 if (instr->can_deopt()) {
1088 DeoptimizeIf(
negative, instr->environment());
1100 uint8_t shift_count =
static_cast<uint8_t
>(value & 0x1F);
1101 switch (instr->op()) {
1103 if (shift_count != 0) {
1108 if (shift_count == 0 && instr->can_deopt()) {
1110 DeoptimizeIf(
negative, instr->environment());
1116 if (shift_count != 0) {
1128 void LCodeGen::DoSubI(LSubI* instr) {
1129 LOperand* left = instr->InputAt(0);
1130 LOperand* right = instr->InputAt(1);
1131 ASSERT(left->Equals(instr->result()));
1133 if (right->IsConstantOperand()) {
1136 }
else if (right->IsRegister()) {
1143 DeoptimizeIf(
overflow, instr->environment());
1148 void LCodeGen::DoConstantI(LConstantI* instr) {
1149 ASSERT(instr->result()->IsRegister());
1154 void LCodeGen::DoConstantD(LConstantD* instr) {
1155 ASSERT(instr->result()->IsDoubleRegister());
1156 XMMRegister res = ToDoubleRegister(instr->result());
1157 double v = instr->value();
1158 uint64_t int_val = BitCast<uint64_t, double>(v);
1165 __ Set(tmp, int_val);
1171 void LCodeGen::DoConstantT(LConstantT* instr) {
1172 Handle<Object> value = instr->value();
1173 if (value->IsSmi()) {
1177 Handle<HeapObject>::cast(value));
1182 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1183 Register result =
ToRegister(instr->result());
1184 Register array =
ToRegister(instr->InputAt(0));
1189 void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
1190 Register result =
ToRegister(instr->result());
1191 Register array =
ToRegister(instr->InputAt(0));
1196 void LCodeGen::DoElementsKind(LElementsKind* instr) {
1197 Register result =
ToRegister(instr->result());
1198 Register input =
ToRegister(instr->InputAt(0));
1210 void LCodeGen::DoValueOf(LValueOf* instr) {
1211 Register input =
ToRegister(instr->InputAt(0));
1212 Register result =
ToRegister(instr->result());
1213 ASSERT(input.is(result));
1216 __ JumpIfSmi(input, &done, Label::kNear);
1227 void LCodeGen::DoDateField(LDateField* instr) {
1228 Register
object =
ToRegister(instr->InputAt(0));
1229 Register result =
ToRegister(instr->result());
1230 Smi* index = instr->index();
1231 Label runtime, done;
1232 ASSERT(
object.is(result));
1236 __ AbortIfSmi(
object);
1238 __ Assert(
equal,
"Trying to get date field from non-date.");
1241 if (index->value() == 0) {
1245 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1255 __ PrepareCallCFunction(2);
1257 __ movq(
rcx,
object);
1260 __ movq(
rdi,
object);
1263 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1270 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1271 LOperand* input = instr->InputAt(0);
1272 ASSERT(input->Equals(instr->result()));
1277 void LCodeGen::DoThrow(LThrow* instr) {
1279 CallRuntime(Runtime::kThrow, 1, instr);
1281 if (FLAG_debug_code) {
1282 Comment(
"Unreachable code.");
1288 void LCodeGen::DoAddI(LAddI* instr) {
1289 LOperand* left = instr->InputAt(0);
1290 LOperand* right = instr->InputAt(1);
1291 ASSERT(left->Equals(instr->result()));
1293 if (right->IsConstantOperand()) {
1296 }
else if (right->IsRegister()) {
1303 DeoptimizeIf(
overflow, instr->environment());
1308 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1309 XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1310 XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1311 XMMRegister result = ToDoubleRegister(instr->result());
1313 ASSERT(instr->op() == Token::MOD || left.is(result));
1314 switch (instr->op()) {
1316 __ addsd(left, right);
1319 __ subsd(left, right);
1322 __ mulsd(left, right);
1325 __ divsd(left, right);
1328 __ PrepareCallCFunction(2);
1332 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
1343 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1349 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1354 int LCodeGen::GetNextEmittedBlock(
int block) {
1355 for (
int i = block + 1; i < graph()->blocks()->length(); ++i) {
1356 LLabel* label = chunk_->GetLabel(i);
1357 if (!label->HasReplacement())
return i;
1363 void LCodeGen::EmitBranch(
int left_block,
int right_block,
Condition cc) {
1364 int next_block = GetNextEmittedBlock(current_block_);
1365 right_block = chunk_->LookupDestination(right_block);
1366 left_block = chunk_->LookupDestination(left_block);
1368 if (right_block == left_block) {
1369 EmitGoto(left_block);
1370 }
else if (left_block == next_block) {
1372 }
else if (right_block == next_block) {
1373 __ j(cc, chunk_->GetAssemblyLabel(left_block));
1375 __ j(cc, chunk_->GetAssemblyLabel(left_block));
1377 __ jmp(chunk_->GetAssemblyLabel(right_block));
1383 void LCodeGen::DoBranch(LBranch* instr) {
1384 int true_block = chunk_->LookupDestination(instr->true_block_id());
1385 int false_block = chunk_->LookupDestination(instr->false_block_id());
1387 Representation r = instr->hydrogen()->value()->representation();
1388 if (r.IsInteger32()) {
1389 Register reg =
ToRegister(instr->InputAt(0));
1391 EmitBranch(true_block, false_block,
not_zero);
1392 }
else if (r.IsDouble()) {
1393 XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
1396 EmitBranch(true_block, false_block,
not_equal);
1399 Register reg =
ToRegister(instr->InputAt(0));
1400 HType
type = instr->hydrogen()->value()->type();
1401 if (type.IsBoolean()) {
1402 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1403 EmitBranch(true_block, false_block,
equal);
1404 }
else if (type.IsSmi()) {
1406 EmitBranch(true_block, false_block,
not_equal);
1408 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1409 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1411 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1417 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1422 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1425 __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1430 __ CompareRoot(reg, Heap::kNullValueRootIndex);
1438 __ JumpIfSmi(reg, true_label);
1439 }
else if (expected.NeedsMap()) {
1442 DeoptimizeIf(
zero, instr->environment());
1446 if (expected.NeedsMap()) {
1449 if (expected.CanBeUndetectable()) {
1470 __ jmp(false_label);
1471 __ bind(¬_string);
1476 Label not_heap_number;
1477 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
1483 __ bind(¬_heap_number);
1493 void LCodeGen::EmitGoto(
int block) {
1494 block = chunk_->LookupDestination(block);
1495 int next_block = GetNextEmittedBlock(current_block_);
1496 if (block != next_block) {
1497 __ jmp(chunk_->GetAssemblyLabel(block));
1502 void LCodeGen::DoGoto(LGoto* instr) {
1503 EmitGoto(instr->block_id());
1511 case Token::EQ_STRICT:
1527 case Token::INSTANCEOF:
1535 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1536 LOperand* left = instr->InputAt(0);
1537 LOperand* right = instr->InputAt(1);
1538 int false_block = chunk_->LookupDestination(instr->false_block_id());
1539 int true_block = chunk_->LookupDestination(instr->true_block_id());
1540 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1542 if (left->IsConstantOperand() && right->IsConstantOperand()) {
1549 EmitGoto(next_block);
1551 if (instr->is_double()) {
1554 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1558 if (right->IsConstantOperand()) {
1561 }
else if (left->IsConstantOperand()) {
1563 if (right->IsRegister()) {
1566 __ cmpl(ToOperand(right), Immediate(value));
1571 if (right->IsRegister()) {
1578 EmitBranch(true_block, false_block, cc);
1583 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1584 Register left =
ToRegister(instr->InputAt(0));
1585 Register right =
ToRegister(instr->InputAt(1));
1586 int false_block = chunk_->LookupDestination(instr->false_block_id());
1587 int true_block = chunk_->LookupDestination(instr->true_block_id());
1589 __ cmpq(left, right);
1590 EmitBranch(true_block, false_block,
equal);
1594 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1595 Register left =
ToRegister(instr->InputAt(0));
1596 int true_block = chunk_->LookupDestination(instr->true_block_id());
1597 int false_block = chunk_->LookupDestination(instr->false_block_id());
1599 __ cmpq(left, Immediate(instr->hydrogen()->right()));
1600 EmitBranch(true_block, false_block,
equal);
1604 void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1605 Register reg =
ToRegister(instr->InputAt(0));
1606 int false_block = chunk_->LookupDestination(instr->false_block_id());
1610 if (instr->hydrogen()->representation().IsSpecialization() ||
1611 instr->hydrogen()->type().IsSmi()) {
1612 EmitGoto(false_block);
1616 int true_block = chunk_->LookupDestination(instr->true_block_id());
1618 Heap::kNullValueRootIndex :
1619 Heap::kUndefinedValueRootIndex;
1620 __ CompareRoot(reg, nil_value);
1622 EmitBranch(true_block, false_block,
equal);
1625 Heap::kUndefinedValueRootIndex :
1626 Heap::kNullValueRootIndex;
1627 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1628 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1630 __ CompareRoot(reg, other_nil_value);
1632 __ JumpIfSmi(reg, false_label);
1635 Register scratch =
ToRegister(instr->TempAt(0));
1639 EmitBranch(true_block, false_block,
not_zero);
1644 Condition LCodeGen::EmitIsObject(Register input,
1645 Label* is_not_object,
1649 __ JumpIfSmi(input, is_not_object);
1651 __ CompareRoot(input, Heap::kNullValueRootIndex);
1669 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1670 Register reg =
ToRegister(instr->InputAt(0));
1672 int true_block = chunk_->LookupDestination(instr->true_block_id());
1673 int false_block = chunk_->LookupDestination(instr->false_block_id());
1674 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1675 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1677 Condition true_cond = EmitIsObject(reg, false_label, true_label);
1679 EmitBranch(true_block, false_block, true_cond);
1683 Condition LCodeGen::EmitIsString(Register input,
1685 Label* is_not_string) {
1686 __ JumpIfSmi(input, is_not_string);
1687 Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
1693 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1694 Register reg =
ToRegister(instr->InputAt(0));
1695 Register temp =
ToRegister(instr->TempAt(0));
1697 int true_block = chunk_->LookupDestination(instr->true_block_id());
1698 int false_block = chunk_->LookupDestination(instr->false_block_id());
1699 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1701 Condition true_cond = EmitIsString(reg, temp, false_label);
1703 EmitBranch(true_block, false_block, true_cond);
1707 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1708 int true_block = chunk_->LookupDestination(instr->true_block_id());
1709 int false_block = chunk_->LookupDestination(instr->false_block_id());
1712 if (instr->InputAt(0)->IsRegister()) {
1713 Register input =
ToRegister(instr->InputAt(0));
1714 is_smi = masm()->CheckSmi(input);
1716 Operand input = ToOperand(instr->InputAt(0));
1717 is_smi = masm()->CheckSmi(input);
1719 EmitBranch(true_block, false_block, is_smi);
1723 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1724 Register input =
ToRegister(instr->InputAt(0));
1725 Register temp =
ToRegister(instr->TempAt(0));
1727 int true_block = chunk_->LookupDestination(instr->true_block_id());
1728 int false_block = chunk_->LookupDestination(instr->false_block_id());
1730 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
1734 EmitBranch(true_block, false_block,
not_zero);
1738 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
1740 int true_block = chunk_->LookupDestination(instr->true_block_id());
1741 int false_block = chunk_->LookupDestination(instr->false_block_id());
1744 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1746 Condition condition = TokenToCondition(op,
false);
1749 EmitBranch(true_block, false_block, condition);
1753 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
1762 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1765 if (from == to)
return equal;
1773 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1774 Register input =
ToRegister(instr->InputAt(0));
1776 int true_block = chunk_->LookupDestination(instr->true_block_id());
1777 int false_block = chunk_->LookupDestination(instr->false_block_id());
1779 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1781 __ JumpIfSmi(input, false_label);
1784 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
1788 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1789 Register input =
ToRegister(instr->InputAt(0));
1790 Register result =
ToRegister(instr->result());
1792 if (FLAG_debug_code) {
1793 __ AbortIfNotString(input);
1798 __ IndexFromHash(result, result);
1802 void LCodeGen::DoHasCachedArrayIndexAndBranch(
1803 LHasCachedArrayIndexAndBranch* instr) {
1804 Register input =
ToRegister(instr->InputAt(0));
1806 int true_block = chunk_->LookupDestination(instr->true_block_id());
1807 int false_block = chunk_->LookupDestination(instr->false_block_id());
1811 EmitBranch(true_block, false_block,
equal);
1817 void LCodeGen::EmitClassOfTest(Label* is_true,
1819 Handle<String> class_name,
1824 ASSERT(!input.is(temp2));
1827 __ JumpIfSmi(input, is_false);
1829 if (class_name->IsEqualTo(
CStrVector(
"Function"))) {
1860 if (class_name->IsEqualTo(
CStrVector(
"Object"))) {
1877 ASSERT(class_name->IsSymbol());
1878 __ Cmp(temp, class_name);
1883 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1884 Register input =
ToRegister(instr->InputAt(0));
1885 Register temp =
ToRegister(instr->TempAt(0));
1886 Register temp2 =
ToRegister(instr->TempAt(1));
1887 Handle<String> class_name = instr->hydrogen()->class_name();
1889 int true_block = chunk_->LookupDestination(instr->true_block_id());
1890 int false_block = chunk_->LookupDestination(instr->false_block_id());
1892 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1893 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1895 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1897 EmitBranch(true_block, false_block,
equal);
1901 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1902 Register reg =
ToRegister(instr->InputAt(0));
1903 int true_block = instr->true_block_id();
1904 int false_block = instr->false_block_id();
1907 EmitBranch(true_block, false_block,
equal);
1911 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1915 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1916 Label true_value, done;
1918 __ j(
zero, &true_value, Label::kNear);
1919 __ LoadRoot(
ToRegister(instr->result()), Heap::kFalseValueRootIndex);
1920 __ jmp(&done, Label::kNear);
1921 __ bind(&true_value);
1922 __ LoadRoot(
ToRegister(instr->result()), Heap::kTrueValueRootIndex);
1927 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1928 class DeferredInstanceOfKnownGlobal:
public LDeferredCode {
1930 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1931 LInstanceOfKnownGlobal* instr)
1932 : LDeferredCode(codegen), instr_(instr) { }
1933 virtual void Generate() {
1934 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
1936 virtual LInstruction* instr() {
return instr_; }
1937 Label* map_check() {
return &map_check_; }
1939 LInstanceOfKnownGlobal* instr_;
1944 DeferredInstanceOfKnownGlobal* deferred;
1945 deferred =
new(zone()) DeferredInstanceOfKnownGlobal(
this, instr);
1947 Label done, false_result;
1948 Register
object =
ToRegister(instr->InputAt(0));
1951 __ JumpIfSmi(
object, &false_result);
1960 __ bind(deferred->map_check());
1961 Handle<JSGlobalPropertyCell> cache_cell =
1962 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
1967 __ LoadRoot(
ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
1970 Label end_of_patched_code;
1971 __ bind(&end_of_patched_code);
1978 __ bind(&cache_miss);
1979 __ CompareRoot(
object, Heap::kNullValueRootIndex);
1980 __ j(
equal, &false_result, Label::kNear);
1985 __ bind(&false_result);
1986 __ LoadRoot(
ToRegister(instr->result()), Heap::kFalseValueRootIndex);
1988 __ bind(deferred->exit());
1993 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1996 PushSafepointRegistersScope scope(
this);
1999 InstanceofStub stub(flags);
2002 __ PushHeapObject(instr->function());
2004 static const int kAdditionalDelta = 10;
2006 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
2008 __ push_imm32(delta);
2014 CallCodeGeneric(stub.GetCode(),
2015 RelocInfo::CODE_TARGET,
2017 RECORD_SAFEPOINT_WITH_REGISTERS,
2019 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
2020 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2021 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2030 __ LoadRoot(
rax, Heap::kTrueValueRootIndex);
2032 __ bind(&load_false);
2033 __ LoadRoot(
rax, Heap::kFalseValueRootIndex);
2038 void LCodeGen::DoCmpT(LCmpT* instr) {
2042 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2044 Condition condition = TokenToCondition(op,
false);
2045 Label true_value, done;
2047 __ j(condition, &true_value, Label::kNear);
2048 __ LoadRoot(
ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2049 __ jmp(&done, Label::kNear);
2050 __ bind(&true_value);
2051 __ LoadRoot(
ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2056 void LCodeGen::DoReturn(LReturn* instr) {
2061 __ CallRuntime(Runtime::kTraceExit, 1);
2069 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2070 Register result =
ToRegister(instr->result());
2071 __ LoadGlobalCell(result, instr->hydrogen()->cell());
2072 if (instr->hydrogen()->RequiresHoleCheck()) {
2073 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2074 DeoptimizeIf(
equal, instr->environment());
2079 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2083 __ Move(
rcx, instr->name());
2084 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2085 RelocInfo::CODE_TARGET_CONTEXT;
2086 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2087 CallCode(ic, mode, instr);
2091 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2093 Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();
2099 if (instr->hydrogen()->RequiresHoleCheck()) {
2101 Register cell =
ToRegister(instr->TempAt(0));
2103 __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
2104 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
2105 DeoptimizeIf(
equal, instr->environment());
2107 __ movq(Operand(cell, 0), value);
2117 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2121 __ Move(
rcx, instr->name());
2122 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
2123 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2124 : isolate()->builtins()->StoreIC_Initialize();
2125 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2129 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2130 Register context =
ToRegister(instr->context());
2131 Register result =
ToRegister(instr->result());
2133 if (instr->hydrogen()->RequiresHoleCheck()) {
2134 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2135 if (instr->hydrogen()->DeoptimizesOnHole()) {
2136 DeoptimizeIf(
equal, instr->environment());
2140 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2141 __ bind(&is_not_hole);
2147 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2148 Register context =
ToRegister(instr->context());
2153 Label skip_assignment;
2154 if (instr->hydrogen()->RequiresHoleCheck()) {
2155 __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
2156 if (instr->hydrogen()->DeoptimizesOnHole()) {
2157 DeoptimizeIf(
equal, instr->environment());
2162 __ movq(target, value);
2164 if (instr->hydrogen()->NeedsWriteBarrier()) {
2165 HType type = instr->hydrogen()->value()->type();
2169 Register scratch =
ToRegister(instr->TempAt(0));
2170 __ RecordWriteContextSlot(context,
2179 __ bind(&skip_assignment);
2183 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2184 Register
object =
ToRegister(instr->InputAt(0));
2185 Register result =
ToRegister(instr->result());
2186 if (instr->hydrogen()->is_in_object()) {
2187 __ movq(result,
FieldOperand(
object, instr->hydrogen()->offset()));
2190 __ movq(result,
FieldOperand(result, instr->hydrogen()->offset()));
2195 void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2198 Handle<String>
name,
2199 LEnvironment* env) {
2200 LookupResult lookup(isolate());
2201 type->LookupInDescriptors(
NULL, *name, &lookup);
2202 ASSERT(lookup.IsFound() || lookup.IsCacheable());
2203 if (lookup.IsFound() && lookup.type() ==
FIELD) {
2204 int index = lookup.GetLocalFieldIndexFromMap(*type);
2209 __ movq(result,
FieldOperand(
object, offset + type->instance_size()));
2216 Handle<JSFunction>
function(lookup.GetConstantFunctionFromMap(*type));
2217 __ LoadHeapObject(result,
function);
2223 while (current != heap->null_value()) {
2224 Handle<HeapObject> link(current);
2225 __ LoadHeapObject(result, link);
2231 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2238 static bool CompactEmit(
2239 SmallMapList* list, Handle<String> name,
int i, Isolate* isolate) {
2240 LookupResult lookup(isolate);
2241 Handle<Map> map = list->at(i);
2242 map->LookupInDescriptors(
NULL, *name, &lookup);
2243 return lookup.IsFound() &&
2248 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2249 Register
object =
ToRegister(instr->object());
2250 Register result =
ToRegister(instr->result());
2252 int map_count = instr->hydrogen()->types()->length();
2253 bool need_generic = instr->hydrogen()->need_generic();
2255 if (map_count == 0 && !need_generic) {
2259 Handle<String> name = instr->hydrogen()->name();
2261 bool all_are_compact =
true;
2262 for (
int i = 0; i < map_count; ++i) {
2263 if (!CompactEmit(instr->hydrogen()->types(),
name, i, isolate())) {
2264 all_are_compact =
false;
2268 for (
int i = 0; i < map_count; ++i) {
2269 bool last = (i == map_count - 1);
2270 Handle<Map> map = instr->hydrogen()->types()->at(i);
2273 if (last && !need_generic) {
2274 DeoptimizeIf(
not_equal, instr->environment());
2275 __ bind(&check_passed);
2276 EmitLoadFieldOrConstantFunction(
2277 result,
object, map, name, instr->environment());
2280 bool compact = all_are_compact ?
true :
2281 CompactEmit(instr->hydrogen()->types(),
name, i, isolate());
2282 __ j(
not_equal, &next, compact ? Label::kNear : Label::kFar);
2283 __ bind(&check_passed);
2284 EmitLoadFieldOrConstantFunction(
2285 result,
object, map, name, instr->environment());
2286 __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
2292 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2293 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2299 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2303 __ Move(
rcx, instr->name());
2304 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2305 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2309 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2310 Register
function =
ToRegister(instr->function());
2311 Register result =
ToRegister(instr->result());
2315 DeoptimizeIf(
not_equal, instr->environment());
2328 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2329 DeoptimizeIf(
equal, instr->environment());
2338 __ jmp(&done, Label::kNear);
2342 __ bind(&non_instance);
2350 void LCodeGen::DoLoadElements(LLoadElements* instr) {
2351 Register result =
ToRegister(instr->result());
2352 Register input =
ToRegister(instr->InputAt(0));
2354 if (FLAG_debug_code) {
2355 Label done, ok, fail;
2357 Heap::kFixedArrayMapRootIndex);
2358 __ j(
equal, &done, Label::kNear);
2360 Heap::kFixedCOWArrayMapRootIndex);
2361 __ j(
equal, &done, Label::kNear);
2362 Register temp((result.is(
rax)) ?
rbx :
rax);
2369 __ j(
less, &fail, Label::kNear);
2373 __ j(
less, &fail, Label::kNear);
2377 __ Abort(
"Check for fast or external elements failed");
2385 void LCodeGen::DoLoadExternalArrayPointer(
2386 LLoadExternalArrayPointer* instr) {
2387 Register result =
ToRegister(instr->result());
2388 Register input =
ToRegister(instr->InputAt(0));
2394 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2395 Register arguments =
ToRegister(instr->arguments());
2396 Register length =
ToRegister(instr->length());
2397 Register result =
ToRegister(instr->result());
2399 if (instr->index()->IsRegister()) {
2402 __ subl(length, ToOperand(instr->index()));
2412 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2413 Register result =
ToRegister(instr->result());
2415 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2419 __ movsxlq(key_reg, key_reg);
2424 BuildFastArrayOperand(instr->elements(),
2428 instr->additional_index()));
2431 if (instr->hydrogen()->RequiresHoleCheck()) {
2436 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2437 DeoptimizeIf(
equal, instr->environment());
2443 void LCodeGen::DoLoadKeyedFastDoubleElement(
2444 LLoadKeyedFastDoubleElement* instr) {
2445 XMMRegister result(ToDoubleRegister(instr->result()));
2447 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2451 __ movsxlq(key_reg, key_reg);
2454 if (instr->hydrogen()->RequiresHoleCheck()) {
2457 Operand hole_check_operand = BuildFastArrayOperand(
2462 instr->additional_index());
2464 DeoptimizeIf(
equal, instr->environment());
2467 Operand double_load_operand = BuildFastArrayOperand(
2472 instr->additional_index());
2473 __ movsd(result, double_load_operand);
2477 Operand LCodeGen::BuildFastArrayOperand(
2478 LOperand* elements_pointer,
2482 uint32_t additional_index) {
2483 Register elements_pointer_reg =
ToRegister(elements_pointer);
2485 if (key->IsConstantOperand()) {
2487 if (constant_value & 0xF0000000) {
2488 Abort(
"array index constant value too big");
2490 return Operand(elements_pointer_reg,
2491 ((constant_value + additional_index) << shift_size)
2495 return Operand(elements_pointer_reg,
2498 offset + (additional_index << shift_size));
2503 void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2504 LLoadKeyedSpecializedArrayElement* instr) {
2506 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2510 instr->additional_index()));
2511 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
2515 __ movsxlq(key_reg, key_reg);
2519 XMMRegister result(ToDoubleRegister(instr->result()));
2520 __ movss(result, operand);
2521 __ cvtss2sd(result, result);
2523 __ movsd(ToDoubleRegister(instr->result()), operand);
2525 Register result(
ToRegister(instr->result()));
2526 switch (elements_kind) {
2528 __ movsxbq(result, operand);
2532 __ movzxbq(result, operand);
2535 __ movsxwq(result, operand);
2538 __ movzxwq(result, operand);
2541 __ movsxlq(result, operand);
2544 __ movl(result, operand);
2545 __ testl(result, result);
2549 DeoptimizeIf(
negative, instr->environment());
2568 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2572 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2573 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2577 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2578 Register result =
ToRegister(instr->result());
2580 if (instr->hydrogen()->from_inlined()) {
2581 __ lea(result, Operand(
rsp, -2 * kPointerSize));
2584 Label done, adapted;
2588 __ j(
equal, &adapted, Label::kNear);
2591 __ movq(result,
rbp);
2592 __ jmp(&done, Label::kNear);
2605 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2606 Register result =
ToRegister(instr->result());
2611 if (instr->InputAt(0)->IsRegister()) {
2614 __ cmpq(
rbp, ToOperand(instr->InputAt(0)));
2616 __ movl(result, Immediate(scope()->num_parameters()));
2617 __ j(
equal, &done, Label::kNear);
2621 __ SmiToInteger32(result,
2630 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
2631 Register receiver =
ToRegister(instr->receiver());
2632 Register
function =
ToRegister(instr->function());
2637 Label global_object, receiver_ok;
2655 __ CompareRoot(receiver, Heap::kNullValueRootIndex);
2656 __ j(
equal, &global_object, Label::kNear);
2657 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
2658 __ j(
equal, &global_object, Label::kNear);
2662 DeoptimizeIf(is_smi, instr->environment());
2664 DeoptimizeIf(
below, instr->environment());
2665 __ jmp(&receiver_ok, Label::kNear);
2667 __ bind(&global_object);
2674 __ bind(&receiver_ok);
2678 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2679 Register receiver =
ToRegister(instr->receiver());
2680 Register
function =
ToRegister(instr->function());
2681 Register length =
ToRegister(instr->length());
2682 Register elements =
ToRegister(instr->elements());
2689 const uint32_t kArgumentsLimit = 1 *
KB;
2690 __ cmpq(length, Immediate(kArgumentsLimit));
2691 DeoptimizeIf(
above, instr->environment());
2694 __ movq(receiver, length);
2700 __ testl(length, length);
2701 __ j(
zero, &invoke, Label::kNear);
2709 ASSERT(instr->HasPointerMap());
2710 LPointerMap* pointers = instr->pointer_map();
2711 RecordPosition(pointers->position());
2712 SafepointGenerator safepoint_generator(
2713 this, pointers, Safepoint::kLazyDeopt);
2714 ParameterCount actual(
rax);
2721 void LCodeGen::DoPushArgument(LPushArgument* instr) {
2722 LOperand* argument = instr->InputAt(0);
2723 EmitPushTaggedOperand(argument);
2727 void LCodeGen::DoDrop(LDrop* instr) {
2728 __ Drop(instr->count());
2732 void LCodeGen::DoThisFunction(LThisFunction* instr) {
2733 Register result =
ToRegister(instr->result());
2734 __ LoadHeapObject(result, instr->hydrogen()->closure());
2738 void LCodeGen::DoContext(LContext* instr) {
2739 Register result =
ToRegister(instr->result());
2740 __ movq(result,
rsi);
2744 void LCodeGen::DoOuterContext(LOuterContext* instr) {
2745 Register context =
ToRegister(instr->context());
2746 Register result =
ToRegister(instr->result());
2752 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
2754 __ PushHeapObject(instr->hydrogen()->pairs());
2756 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
2760 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2761 Register result =
ToRegister(instr->result());
2766 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2767 Register global =
ToRegister(instr->global());
2768 Register result =
ToRegister(instr->result());
2773 void LCodeGen::CallKnownFunction(Handle<JSFunction>
function,
2775 LInstruction* instr,
2777 RDIState rdi_state) {
2778 bool can_invoke_directly = !
function->NeedsArgumentsAdaption() ||
2779 function->shared()->formal_parameter_count() == arity;
2781 LPointerMap* pointers = instr->pointer_map();
2782 RecordPosition(pointers->position());
2784 if (can_invoke_directly) {
2785 if (rdi_state == RDI_UNINITIALIZED) {
2786 __ LoadHeapObject(
rdi,
function);
2790 bool change_context =
2791 (info()->closure()->context() !=
function->context()) ||
2792 scope()->contains_with() ||
2793 (scope()->num_heap_slots() > 0);
2794 if (change_context) {
2800 if (!function->NeedsArgumentsAdaption()) {
2805 __ SetCallKind(
rcx, call_kind);
2806 if (*
function == *info()->closure()) {
2813 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
2816 SafepointGenerator generator(
2817 this, pointers, Safepoint::kLazyDeopt);
2818 ParameterCount count(arity);
2819 __ InvokeFunction(
function, count,
CALL_FUNCTION, generator, call_kind);
2827 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2829 CallKnownFunction(instr->function(),
2837 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2838 Register input_reg =
ToRegister(instr->InputAt(0));
2840 Heap::kHeapNumberMapRootIndex);
2841 DeoptimizeIf(
not_equal, instr->environment());
2844 Register tmp = input_reg.is(
rax) ?
rcx :
rax;
2848 PushSafepointRegistersScope scope(
this);
2862 Label allocated, slow;
2863 __ AllocateHeapNumber(tmp, tmp2, &slow);
2869 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2876 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2878 __ bind(&allocated);
2880 __ shl(tmp2, Immediate(1));
2881 __ shr(tmp2, Immediate(1));
2883 __ StoreToSafepointRegisterSlot(input_reg, tmp);
2889 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2890 Register input_reg =
ToRegister(instr->InputAt(0));
2891 __ testl(input_reg, input_reg);
2895 DeoptimizeIf(negative, instr->environment());
2896 __ bind(&is_positive);
2900 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2902 class DeferredMathAbsTaggedHeapNumber:
public LDeferredCode {
2904 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2905 LUnaryMathOperation* instr)
2906 : LDeferredCode(codegen), instr_(instr) { }
2907 virtual void Generate() {
2908 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2910 virtual LInstruction* instr() {
return instr_; }
2912 LUnaryMathOperation* instr_;
2915 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2916 Representation r = instr->hydrogen()->value()->representation();
2919 XMMRegister scratch =
xmm0;
2920 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2921 __ xorps(scratch, scratch);
2922 __ subsd(scratch, input_reg);
2923 __ andpd(input_reg, scratch);
2924 }
else if (r.IsInteger32()) {
2925 EmitIntegerMathAbs(instr);
2927 DeferredMathAbsTaggedHeapNumber* deferred =
2928 new(zone()) DeferredMathAbsTaggedHeapNumber(
this, instr);
2929 Register input_reg =
ToRegister(instr->InputAt(0));
2931 __ JumpIfNotSmi(input_reg, deferred->entry());
2932 __ SmiToInteger32(input_reg, input_reg);
2933 EmitIntegerMathAbs(instr);
2934 __ Integer32ToSmi(input_reg, input_reg);
2935 __ bind(deferred->exit());
2940 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2941 XMMRegister xmm_scratch =
xmm0;
2942 Register output_reg =
ToRegister(instr->result());
2943 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2947 CpuFeatures::Scope scope(
SSE4_1);
2950 __ movq(output_reg, input_reg);
2951 __ subq(output_reg, Immediate(1));
2952 DeoptimizeIf(
overflow, instr->environment());
2955 __ cvttsd2si(output_reg, xmm_scratch);
2956 __ cmpl(output_reg, Immediate(0x80000000));
2957 DeoptimizeIf(
equal, instr->environment());
2960 __ xorps(xmm_scratch, xmm_scratch);
2961 __ ucomisd(input_reg, xmm_scratch);
2962 DeoptimizeIf(
below, instr->environment());
2965 Label positive_sign;
2966 __ j(
above, &positive_sign, Label::kNear);
2967 __ movmskpd(output_reg, input_reg);
2968 __ testq(output_reg, Immediate(1));
2969 DeoptimizeIf(
not_zero, instr->environment());
2970 __ Set(output_reg, 0);
2972 __ bind(&positive_sign);
2976 __ cvttsd2si(output_reg, input_reg);
2979 __ cmpl(output_reg, Immediate(0x80000000));
2980 DeoptimizeIf(
equal, instr->environment());
2986 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2987 const XMMRegister xmm_scratch =
xmm0;
2988 Register output_reg =
ToRegister(instr->result());
2989 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2996 __ ucomisd(xmm_scratch, input_reg);
2998 __ j(
above, &below_half, Label::kNear);
3003 __ addsd(xmm_scratch, input_reg);
3006 __ cvttsd2si(output_reg, xmm_scratch);
3008 __ cmpl(output_reg, Immediate(0x80000000));
3009 DeoptimizeIf(
equal, instr->environment());
3012 __ bind(&below_half);
3015 __ movq(output_reg, input_reg);
3016 __ testq(output_reg, output_reg);
3017 DeoptimizeIf(negative, instr->environment());
3024 __ ucomisd(input_reg, xmm_scratch);
3025 DeoptimizeIf(
below, instr->environment());
3027 __ xorl(output_reg, output_reg);
3033 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3034 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3035 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3036 __ sqrtsd(input_reg, input_reg);
3040 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3041 XMMRegister xmm_scratch =
xmm0;
3042 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3043 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
3053 __ ucomisd(xmm_scratch, input_reg);
3057 __ j(
carry, &sqrt, Label::kNear);
3059 __ xorps(input_reg, input_reg);
3060 __ subsd(input_reg, xmm_scratch);
3061 __ jmp(&done, Label::kNear);
3065 __ xorps(xmm_scratch, xmm_scratch);
3066 __ addsd(input_reg, xmm_scratch);
3067 __ sqrtsd(input_reg, input_reg);
3072 void LCodeGen::DoPower(LPower* instr) {
3073 Representation exponent_type = instr->hydrogen()->right()->representation();
3079 Register exponent =
rdx;
3081 Register exponent =
rdi;
3083 ASSERT(!instr->InputAt(1)->IsRegister() ||
3085 ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
3086 ToDoubleRegister(instr->InputAt(1)).is(
xmm1));
3087 ASSERT(ToDoubleRegister(instr->InputAt(0)).is(
xmm2));
3088 ASSERT(ToDoubleRegister(instr->result()).is(
xmm3));
3090 if (exponent_type.IsTagged()) {
3092 __ JumpIfSmi(exponent, &no_deopt);
3094 DeoptimizeIf(
not_equal, instr->environment());
3098 }
else if (exponent_type.IsInteger32()) {
3102 ASSERT(exponent_type.IsDouble());
3109 void LCodeGen::DoRandom(LRandom* instr) {
3110 class DeferredDoRandom:
public LDeferredCode {
3112 DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
3113 : LDeferredCode(codegen), instr_(instr) { }
3114 virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
3115 virtual LInstruction* instr() {
return instr_; }
3120 DeferredDoRandom* deferred =
new(zone()) DeferredDoRandom(
this, instr);
3124 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3130 Register global_object =
rcx;
3133 Register global_object =
rdi;
3136 static const int kSeedSize =
sizeof(uint32_t);
3139 __ movq(global_object,
3141 static const int kRandomSeedOffset =
3150 __ j(
zero, deferred->entry());
3157 __ andl(
rdx, Immediate(0xFFFF));
3158 __ imull(
rdx,
rdx, Immediate(18273));
3159 __ shrl(
rax, Immediate(16));
3166 __ andl(
rdx, Immediate(0xFFFF));
3167 __ imull(
rdx,
rdx, Immediate(36969));
3168 __ shrl(
rcx, Immediate(16));
3174 __ shll(
rax, Immediate(14));
3175 __ andl(
rcx, Immediate(0x3FFFF));
3178 __ bind(deferred->exit());
3182 __ movl(
rcx, Immediate(0x49800000));
3191 void LCodeGen::DoDeferredRandom(LRandom* instr) {
3192 __ PrepareCallCFunction(1);
3193 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3199 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3200 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3203 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3207 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
3208 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3211 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3215 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3216 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3219 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3223 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3224 ASSERT(ToDoubleRegister(instr->result()).is(
xmm1));
3227 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3231 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3232 switch (instr->op()) {
3246 DoMathPowHalf(instr);
3267 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3269 ASSERT(instr->HasPointerMap());
3271 if (instr->known_function().is_null()) {
3272 LPointerMap* pointers = instr->pointer_map();
3273 RecordPosition(pointers->position());
3274 SafepointGenerator generator(
this, pointers, Safepoint::kLazyDeopt);
3275 ParameterCount count(instr->arity());
3279 CallKnownFunction(instr->known_function(),
3283 RDI_CONTAINS_TARGET);
3288 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3292 int arity = instr->arity();
3294 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
3295 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3300 void LCodeGen::DoCallNamed(LCallNamed* instr) {
3303 int arity = instr->arity();
3304 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3306 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3307 __ Move(
rcx, instr->name());
3308 CallCode(ic, mode, instr);
3313 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3317 int arity = instr->arity();
3319 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3324 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3326 int arity = instr->arity();
3327 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
3329 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
3330 __ Move(
rcx, instr->name());
3331 CallCode(ic, mode, instr);
3336 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3338 CallKnownFunction(instr->target(),
3346 void LCodeGen::DoCallNew(LCallNew* instr) {
3351 __ Set(
rax, instr->arity());
3352 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
3356 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3357 CallRuntime(instr->function(), instr->arity(), instr);
3361 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3362 Register
object =
ToRegister(instr->object());
3364 int offset = instr->offset();
3366 if (!instr->transition().is_null()) {
3367 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
3369 instr->transition());
3371 Register temp =
ToRegister(instr->TempAt(0));
3375 __ RecordWriteField(
object,
3386 HType type = instr->hydrogen()->value()->type();
3389 if (instr->is_in_object()) {
3391 if (instr->hydrogen()->NeedsWriteBarrier()) {
3392 Register temp =
ToRegister(instr->TempAt(0));
3394 __ RecordWriteField(
object,
3403 Register temp =
ToRegister(instr->TempAt(0));
3406 if (instr->hydrogen()->NeedsWriteBarrier()) {
3409 __ RecordWriteField(temp,
3421 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3425 __ Move(
rcx, instr->hydrogen()->name());
3426 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
3427 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3428 : isolate()->builtins()->StoreIC_Initialize();
3429 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3433 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3434 LStoreKeyedSpecializedArrayElement* instr) {
3436 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3440 instr->additional_index()));
3442 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3446 __ movsxlq(key_reg, key_reg);
3450 XMMRegister value(ToDoubleRegister(instr->value()));
3451 __ cvtsd2ss(value, value);
3452 __ movss(operand, value);
3454 __ movsd(operand, ToDoubleRegister(instr->value()));
3457 switch (elements_kind) {
3461 __ movb(operand, value);
3465 __ movw(operand, value);
3469 __ movl(operand, value);
3488 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3489 if (instr->length()->IsRegister()) {
3491 if (FLAG_debug_code) {
3492 __ AbortIfNotZeroExtended(reg);
3494 if (instr->index()->IsConstantOperand()) {
3499 if (FLAG_debug_code) {
3500 __ AbortIfNotZeroExtended(reg2);
3505 if (instr->index()->IsConstantOperand()) {
3506 __ cmpq(ToOperand(instr->length()),
3509 __ cmpq(ToOperand(instr->length()),
ToRegister(instr->index()));
3516 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3518 Register elements =
ToRegister(instr->object());
3519 Register key = instr->key()->IsRegister() ?
ToRegister(instr->key()) :
no_reg;
3522 BuildFastArrayOperand(instr->object(),
3526 instr->additional_index());
3528 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3532 __ movsxlq(key_reg, key_reg);
3535 __ movq(operand, value);
3537 if (instr->hydrogen()->NeedsWriteBarrier()) {
3538 ASSERT(!instr->key()->IsConstantOperand());
3539 HType type = instr->hydrogen()->value()->type();
3543 __ lea(key, operand);
3544 __ RecordWrite(elements,
3554 void LCodeGen::DoStoreKeyedFastDoubleElement(
3555 LStoreKeyedFastDoubleElement* instr) {
3556 XMMRegister value = ToDoubleRegister(instr->value());
3558 if (instr->NeedsCanonicalization()) {
3561 __ ucomisd(value, value);
3568 __ bind(&have_value);
3571 Operand double_store_operand = BuildFastArrayOperand(
3576 instr->additional_index());
3578 if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
3582 __ movsxlq(key_reg, key_reg);
3585 __ movsd(double_store_operand, value);
3588 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3593 Handle<Code> ic = (instr->strict_mode_flag() ==
kStrictMode)
3594 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3595 : isolate()->builtins()->KeyedStoreIC_Initialize();
3596 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3600 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3601 Register object_reg =
ToRegister(instr->object());
3602 Register new_map_reg =
ToRegister(instr->new_map_reg());
3604 Handle<Map> from_map = instr->original_map();
3605 Handle<Map> to_map = instr->transitioned_map();
3609 Label not_applicable;
3612 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
3621 Register fixed_object_reg =
ToRegister(instr->temp_reg());
3624 __ movq(fixed_object_reg, object_reg);
3625 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3626 RelocInfo::CODE_TARGET, instr);
3629 Register fixed_object_reg =
ToRegister(instr->temp_reg());
3632 __ movq(fixed_object_reg, object_reg);
3633 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3634 RelocInfo::CODE_TARGET, instr);
3638 __ bind(¬_applicable);
3642 void LCodeGen::DoStringAdd(LStringAdd* instr) {
3643 EmitPushTaggedOperand(instr->left());
3644 EmitPushTaggedOperand(instr->right());
3646 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3650 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3651 class DeferredStringCharCodeAt:
public LDeferredCode {
3653 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3654 : LDeferredCode(codegen), instr_(instr) { }
3655 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3656 virtual LInstruction* instr() {
return instr_; }
3658 LStringCharCodeAt* instr_;
3661 DeferredStringCharCodeAt* deferred =
3662 new(zone()) DeferredStringCharCodeAt(
this, instr);
3669 __ bind(deferred->exit());
3673 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3674 Register
string =
ToRegister(instr->string());
3675 Register result =
ToRegister(instr->result());
3682 PushSafepointRegistersScope scope(
this);
3687 if (instr->index()->IsConstantOperand()) {
3692 __ Integer32ToSmi(index, index);
3695 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3696 if (FLAG_debug_code) {
3697 __ AbortIfNotSmi(
rax);
3700 __ StoreToSafepointRegisterSlot(result,
rax);
3704 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3705 class DeferredStringCharFromCode:
public LDeferredCode {
3707 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3708 : LDeferredCode(codegen), instr_(instr) { }
3709 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3710 virtual LInstruction* instr() {
return instr_; }
3712 LStringCharFromCode* instr_;
3715 DeferredStringCharFromCode* deferred =
3716 new(zone()) DeferredStringCharFromCode(
this, instr);
3718 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3719 Register char_code =
ToRegister(instr->char_code());
3720 Register result =
ToRegister(instr->result());
3721 ASSERT(!char_code.is(result));
3724 __ j(
above, deferred->entry());
3725 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3729 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
3730 __ j(
equal, deferred->entry());
3731 __ bind(deferred->exit());
3735 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3736 Register char_code =
ToRegister(instr->char_code());
3737 Register result =
ToRegister(instr->result());
3744 PushSafepointRegistersScope scope(
this);
3745 __ Integer32ToSmi(char_code, char_code);
3747 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
3748 __ StoreToSafepointRegisterSlot(result,
rax);
3752 void LCodeGen::DoStringLength(LStringLength* instr) {
3753 Register
string =
ToRegister(instr->string());
3754 Register result =
ToRegister(instr->result());
3759 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3760 LOperand* input = instr->InputAt(0);
3761 ASSERT(input->IsRegister() || input->IsStackSlot());
3762 LOperand* output = instr->result();
3763 ASSERT(output->IsDoubleRegister());
3764 if (input->IsRegister()) {
3765 __ cvtlsi2sd(ToDoubleRegister(output),
ToRegister(input));
3767 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
3772 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3773 LOperand* input = instr->InputAt(0);
3774 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3777 __ Integer32ToSmi(reg, reg);
3781 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3782 class DeferredNumberTagD:
public LDeferredCode {
3784 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3785 : LDeferredCode(codegen), instr_(instr) { }
3786 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3787 virtual LInstruction* instr() {
return instr_; }
3789 LNumberTagD* instr_;
3792 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3796 DeferredNumberTagD* deferred =
new(zone()) DeferredNumberTagD(
this, instr);
3797 if (FLAG_inline_new) {
3798 __ AllocateHeapNumber(reg, tmp, deferred->entry());
3800 __ jmp(deferred->entry());
3802 __ bind(deferred->exit());
3807 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3815 PushSafepointRegistersScope scope(
this);
3816 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3824 void LCodeGen::DoSmiTag(LSmiTag* instr) {
3825 ASSERT(instr->InputAt(0)->Equals(instr->result()));
3826 Register input =
ToRegister(instr->InputAt(0));
3828 __ Integer32ToSmi(input, input);
3832 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3833 ASSERT(instr->InputAt(0)->Equals(instr->result()));
3834 Register input =
ToRegister(instr->InputAt(0));
3835 if (instr->needs_check()) {
3839 if (FLAG_debug_code) {
3840 __ AbortIfNotSmi(input);
3843 __ SmiToInteger32(input, input);
3847 void LCodeGen::EmitNumberUntagD(Register input_reg,
3848 XMMRegister result_reg,
3849 bool deoptimize_on_undefined,
3850 bool deoptimize_on_minus_zero,
3851 LEnvironment* env) {
3852 Label load_smi, done;
3855 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
3859 Heap::kHeapNumberMapRootIndex);
3860 if (deoptimize_on_undefined) {
3864 __ j(
equal, &heap_number, Label::kNear);
3866 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3870 __ xorps(result_reg, result_reg);
3871 __ divsd(result_reg, result_reg);
3872 __ jmp(&done, Label::kNear);
3874 __ bind(&heap_number);
3878 if (deoptimize_on_minus_zero) {
3879 XMMRegister xmm_scratch =
xmm0;
3880 __ xorps(xmm_scratch, xmm_scratch);
3881 __ ucomisd(xmm_scratch, result_reg);
3887 __ jmp(&done, Label::kNear);
3897 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3898 Label done, heap_number;
3899 Register input_reg =
ToRegister(instr->InputAt(0));
3903 Heap::kHeapNumberMapRootIndex);
3905 if (instr->truncating()) {
3906 __ j(
equal, &heap_number, Label::kNear);
3909 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3910 DeoptimizeIf(
not_equal, instr->environment());
3911 __ Set(input_reg, 0);
3912 __ jmp(&done, Label::kNear);
3914 __ bind(&heap_number);
3917 __ cvttsd2siq(input_reg,
xmm0);
3920 DeoptimizeIf(
equal, instr->environment());
3923 DeoptimizeIf(
not_equal, instr->environment());
3925 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3927 __ cvttsd2si(input_reg,
xmm0);
3928 __ cvtlsi2sd(xmm_temp, input_reg);
3929 __ ucomisd(
xmm0, xmm_temp);
3930 DeoptimizeIf(
not_equal, instr->environment());
3933 __ testl(input_reg, input_reg);
3935 __ movmskpd(input_reg,
xmm0);
3936 __ andl(input_reg, Immediate(1));
3937 DeoptimizeIf(
not_zero, instr->environment());
3944 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3945 class DeferredTaggedToI:
public LDeferredCode {
3947 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3948 : LDeferredCode(codegen), instr_(instr) { }
3949 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3950 virtual LInstruction* instr() {
return instr_; }
3955 LOperand* input = instr->InputAt(0);
3956 ASSERT(input->IsRegister());
3957 ASSERT(input->Equals(instr->result()));
3960 DeferredTaggedToI* deferred =
new(zone()) DeferredTaggedToI(
this, instr);
3961 __ JumpIfNotSmi(input_reg, deferred->entry());
3962 __ SmiToInteger32(input_reg, input_reg);
3963 __ bind(deferred->exit());
3967 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3968 LOperand* input = instr->InputAt(0);
3969 ASSERT(input->IsRegister());
3970 LOperand* result = instr->result();
3971 ASSERT(result->IsDoubleRegister());
3974 XMMRegister result_reg = ToDoubleRegister(result);
3976 EmitNumberUntagD(input_reg, result_reg,
3977 instr->hydrogen()->deoptimize_on_undefined(),
3978 instr->hydrogen()->deoptimize_on_minus_zero(),
3979 instr->environment());
3983 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3984 LOperand* input = instr->InputAt(0);
3985 ASSERT(input->IsDoubleRegister());
3986 LOperand* result = instr->result();
3987 ASSERT(result->IsRegister());
3989 XMMRegister input_reg = ToDoubleRegister(input);
3992 if (instr->truncating()) {
3995 __ cvttsd2siq(result_reg, input_reg);
3998 DeoptimizeIf(
equal, instr->environment());
4000 __ cvttsd2si(result_reg, input_reg);
4001 __ cvtlsi2sd(
xmm0, result_reg);
4002 __ ucomisd(
xmm0, input_reg);
4003 DeoptimizeIf(
not_equal, instr->environment());
4009 __ testl(result_reg, result_reg);
4011 __ movmskpd(result_reg, input_reg);
4015 __ andl(result_reg, Immediate(1));
4016 DeoptimizeIf(
not_zero, instr->environment());
4023 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
4024 LOperand* input = instr->InputAt(0);
4030 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4031 LOperand* input = instr->InputAt(0);
4033 DeoptimizeIf(cc, instr->environment());
4037 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4038 Register input =
ToRegister(instr->InputAt(0));
4042 if (instr->hydrogen()->is_interval_check()) {
4045 instr->hydrogen()->GetCheckInterval(&first, &last);
4048 Immediate(static_cast<int8_t>(first)));
4051 if (first == last) {
4052 DeoptimizeIf(
not_equal, instr->environment());
4054 DeoptimizeIf(
below, instr->environment());
4058 Immediate(static_cast<int8_t>(last)));
4059 DeoptimizeIf(
above, instr->environment());
4065 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
4071 DeoptimizeIf(tag == 0 ?
not_zero :
zero, instr->environment());
4077 DeoptimizeIf(
not_equal, instr->environment());
4083 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
4085 Handle<JSFunction> target = instr->hydrogen()->target();
4086 if (isolate()->heap()->InNewSpace(*target)) {
4087 Handle<JSGlobalPropertyCell> cell =
4088 isolate()->factory()->NewJSGlobalPropertyCell(target);
4092 __ Cmp(reg, target);
4094 DeoptimizeIf(
not_equal, instr->environment());
4098 void LCodeGen::DoCheckMapCommon(Register reg,
4101 LEnvironment* env) {
4103 __ CompareMap(reg, map, &success, mode);
4109 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
4110 LOperand* input = instr->InputAt(0);
4111 ASSERT(input->IsRegister());
4115 SmallMapList* map_set = instr->hydrogen()->map_set();
4116 for (
int i = 0; i < map_set->length() - 1; i++) {
4117 Handle<Map> map = map_set->at(i);
4121 Handle<Map> map = map_set->last();
4127 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4128 XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
4129 Register result_reg =
ToRegister(instr->result());
4130 Register temp_reg =
ToRegister(instr->TempAt(0));
4131 __ ClampDoubleToUint8(value_reg,
xmm0, result_reg, temp_reg);
4135 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4136 ASSERT(instr->unclamped()->Equals(instr->result()));
4137 Register value_reg =
ToRegister(instr->result());
4138 __ ClampUint8(value_reg);
4142 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4143 ASSERT(instr->unclamped()->Equals(instr->result()));
4144 Register input_reg =
ToRegister(instr->unclamped());
4145 Register temp_reg =
ToRegister(instr->TempAt(0));
4146 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->TempAt(1));
4147 Label is_smi, done, heap_number;
4149 __ JumpIfSmi(input_reg, &is_smi);
4153 factory()->heap_number_map());
4154 __ j(
equal, &heap_number, Label::kNear);
4158 __ Cmp(input_reg, factory()->undefined_value());
4159 DeoptimizeIf(
not_equal, instr->environment());
4160 __ movq(input_reg, Immediate(0));
4161 __ jmp(&done, Label::kNear);
4164 __ bind(&heap_number);
4166 __ ClampDoubleToUint8(
xmm0, temp_xmm_reg, input_reg, temp_reg);
4167 __ jmp(&done, Label::kNear);
4171 __ SmiToInteger32(input_reg, input_reg);
4172 __ ClampUint8(input_reg);
4178 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
4181 Handle<JSObject> holder = instr->holder();
4182 Handle<JSObject> current_prototype = instr->prototype();
4185 __ LoadHeapObject(reg, current_prototype);
4188 while (!current_prototype.is_identical_to(holder)) {
4189 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4192 Handle<JSObject>(
JSObject::cast(current_prototype->GetPrototype()));
4194 __ LoadHeapObject(reg, current_prototype);
4198 DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
4203 void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
4204 class DeferredAllocateObject:
public LDeferredCode {
4206 DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
4207 : LDeferredCode(codegen), instr_(instr) { }
4208 virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
4209 virtual LInstruction* instr() {
return instr_; }
4211 LAllocateObject* instr_;
4214 DeferredAllocateObject* deferred =
4215 new(zone()) DeferredAllocateObject(
this, instr);
4217 Register result =
ToRegister(instr->result());
4218 Register scratch =
ToRegister(instr->TempAt(0));
4219 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4220 Handle<Map> initial_map(constructor->initial_map());
4221 int instance_size = initial_map->instance_size();
4222 ASSERT(initial_map->pre_allocated_property_fields() +
4223 initial_map->unused_property_fields() -
4224 initial_map->inobject_properties() == 0);
4229 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
4230 __ AllocateInNewSpace(instance_size,
4237 __ bind(deferred->exit());
4238 if (FLAG_debug_code) {
4239 Label is_in_new_space;
4240 __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
4241 __ Abort(
"Allocated object is not in new-space");
4242 __ bind(&is_in_new_space);
4246 Register map = scratch;
4247 __ LoadHeapObject(scratch, constructor);
4250 if (FLAG_debug_code) {
4254 __ Assert(
equal,
"Unexpected instance size");
4256 Immediate(initial_map->pre_allocated_property_fields()));
4257 __ Assert(
equal,
"Unexpected pre-allocated property fields count");
4259 Immediate(initial_map->unused_property_fields()));
4260 __ Assert(
equal,
"Unexpected unused property fields count");
4262 Immediate(initial_map->inobject_properties()));
4263 __ Assert(
equal,
"Unexpected in-object property fields count");
4269 __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
4272 if (initial_map->inobject_properties() != 0) {
4273 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4274 for (
int i = 0; i < initial_map->inobject_properties(); i++) {
4282 void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4283 Register result =
ToRegister(instr->result());
4284 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4285 Handle<Map> initial_map(constructor->initial_map());
4286 int instance_size = initial_map->instance_size();
4293 PushSafepointRegistersScope scope(
this);
4295 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
4296 __ StoreToSafepointRegisterSlot(result,
rax);
4300 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
4301 Heap* heap = isolate()->heap();
4303 instr->hydrogen()->boilerplate_elements_kind();
4309 boilerplate_elements_kind,
true)) {
4310 __ LoadHeapObject(
rax, instr->hydrogen()->boilerplate_object());
4316 __ cmpb(
rbx, Immediate(boilerplate_elements_kind <<
4318 DeoptimizeIf(
not_equal, instr->environment());
4327 __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
4330 int length = instr->hydrogen()->length();
4331 if (instr->hydrogen()->IsCopyOnWrite()) {
4332 ASSERT(instr->hydrogen()->depth() == 1);
4335 FastCloneShallowArrayStub stub(mode, length);
4336 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4337 }
else if (instr->hydrogen()->depth() > 1) {
4338 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4340 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4346 FastCloneShallowArrayStub stub(mode, length);
4347 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4352 void LCodeGen::EmitDeepCopy(Handle<JSObject>
object,
4360 Handle<FixedArrayBase> elements(object->elements());
4361 bool has_elements = elements->length() > 0 &&
4362 elements->map() != isolate()->heap()->fixed_cow_array_map();
4366 int object_offset = *offset;
4367 int object_size =
object->map()->instance_size();
4368 int elements_offset = *offset + object_size;
4369 int elements_size = has_elements ? elements->Size() : 0;
4370 *offset += object_size + elements_size;
4373 ASSERT(object->properties()->length() == 0);
4374 int inobject_properties =
object->map()->inobject_properties();
4375 int header_size = object_size - inobject_properties *
kPointerSize;
4378 __ lea(
rcx, Operand(result, elements_offset));
4386 for (
int i = 0; i < inobject_properties; i++) {
4387 int total_offset = object_offset +
object->GetInObjectPropertyOffset(i);
4388 Handle<Object> value = Handle<Object>(
object->InObjectPropertyAt(i));
4389 if (value->IsJSObject()) {
4391 __ lea(
rcx, Operand(result, *offset));
4393 __ LoadHeapObject(source, value_object);
4394 EmitDeepCopy(value_object, result, source, offset);
4395 }
else if (value->IsHeapObject()) {
4396 __ LoadHeapObject(
rcx, Handle<HeapObject>::cast(value));
4406 __ LoadHeapObject(source, elements);
4413 int elements_length = elements->length();
4414 if (elements->IsFixedDoubleArray()) {
4415 Handle<FixedDoubleArray> double_array =
4417 for (
int i = 0; i < elements_length; i++) {
4418 int64_t value = double_array->get_representation(i);
4424 }
else if (elements->IsFixedArray()) {
4426 for (
int i = 0; i < elements_length; i++) {
4428 Handle<Object> value(fast_elements->get(i));
4429 if (value->IsJSObject()) {
4431 __ lea(
rcx, Operand(result, *offset));
4433 __ LoadHeapObject(source, value_object);
4434 EmitDeepCopy(value_object, result, source, offset);
4435 }
else if (value->IsHeapObject()) {
4436 __ LoadHeapObject(
rcx, Handle<HeapObject>::cast(value));
4450 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
4451 int size = instr->hydrogen()->total_size();
4453 instr->hydrogen()->boilerplate()->GetElementsKind();
4459 boilerplate_elements_kind,
true)) {
4460 __ LoadHeapObject(
rbx, instr->hydrogen()->boilerplate());
4466 __ cmpb(
rcx, Immediate(boilerplate_elements_kind <<
4468 DeoptimizeIf(
not_equal, instr->environment());
4473 Label allocated, runtime_allocate;
4477 __ bind(&runtime_allocate);
4479 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4481 __ bind(&allocated);
4483 __ LoadHeapObject(
rbx, instr->hydrogen()->boilerplate());
4484 EmitDeepCopy(instr->hydrogen()->boilerplate(),
rax,
rbx, &offset);
4489 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
4490 Handle<FixedArray> literals(instr->environment()->closure()->literals());
4491 Handle<FixedArray> constant_properties =
4492 instr->hydrogen()->constant_properties();
4495 __ PushHeapObject(literals);
4497 __ Push(constant_properties);
4498 int flags = instr->hydrogen()->fast_elements()
4501 flags |= instr->hydrogen()->has_function()
4507 int properties_count = constant_properties->length() / 2;
4508 if (instr->hydrogen()->depth() > 1) {
4509 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
4512 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
4514 FastCloneShallowObjectStub stub(properties_count);
4515 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4520 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
4523 CallRuntime(Runtime::kToFastProperties, 1, instr);
4527 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
4536 int literal_offset = FixedArray::kHeaderSize +
4539 __ CompareRoot(
rbx, Heap::kUndefinedValueRootIndex);
4546 __ Push(instr->hydrogen()->pattern());
4547 __ Push(instr->hydrogen()->flags());
4548 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
4551 __ bind(&materialized);
4553 Label allocated, runtime_allocate;
4557 __ bind(&runtime_allocate);
4560 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
4563 __ bind(&allocated);
4572 if ((size % (2 * kPointerSize)) != 0) {
4579 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
4582 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
4583 bool pretenure = instr->hydrogen()->pretenure();
4584 if (!pretenure && shared_info->num_literals() == 0) {
4585 FastNewClosureStub stub(shared_info->language_mode());
4586 __ Push(shared_info);
4587 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4590 __ Push(shared_info);
4591 __ PushRoot(pretenure ?
4592 Heap::kTrueValueRootIndex :
4593 Heap::kFalseValueRootIndex);
4594 CallRuntime(Runtime::kNewClosure, 3, instr);
4599 void LCodeGen::DoTypeof(LTypeof* instr) {
4600 LOperand* input = instr->InputAt(0);
4601 EmitPushTaggedOperand(input);
4602 CallRuntime(Runtime::kTypeof, 1, instr);
4606 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
4607 ASSERT(!operand->IsDoubleRegister());
4608 if (operand->IsConstantOperand()) {
4610 if (object->IsSmi()) {
4611 __ Push(Handle<Smi>::cast(
object));
4613 __ PushHeapObject(Handle<HeapObject>::cast(
object));
4615 }
else if (operand->IsRegister()) {
4618 __ push(ToOperand(operand));
4623 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4624 Register input =
ToRegister(instr->InputAt(0));
4625 int true_block = chunk_->LookupDestination(instr->true_block_id());
4626 int false_block = chunk_->LookupDestination(instr->false_block_id());
4627 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4628 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4631 EmitTypeofIs(true_label, false_label, input, instr->type_literal());
4633 EmitBranch(true_block, false_block, final_branch_condition);
4638 Condition LCodeGen::EmitTypeofIs(Label* true_label,
4641 Handle<String> type_name) {
4643 if (type_name->Equals(heap()->number_symbol())) {
4644 __ JumpIfSmi(input, true_label);
4646 Heap::kHeapNumberMapRootIndex);
4648 final_branch_condition =
equal;
4650 }
else if (type_name->Equals(heap()->string_symbol())) {
4651 __ JumpIfSmi(input, false_label);
4656 final_branch_condition =
zero;
4658 }
else if (type_name->Equals(heap()->boolean_symbol())) {
4659 __ CompareRoot(input, Heap::kTrueValueRootIndex);
4661 __ CompareRoot(input, Heap::kFalseValueRootIndex);
4662 final_branch_condition =
equal;
4664 }
else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
4665 __ CompareRoot(input, Heap::kNullValueRootIndex);
4666 final_branch_condition =
equal;
4668 }
else if (type_name->Equals(heap()->undefined_symbol())) {
4669 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
4671 __ JumpIfSmi(input, false_label);
4678 }
else if (type_name->Equals(heap()->function_symbol())) {
4680 __ JumpIfSmi(input, false_label);
4684 final_branch_condition =
equal;
4686 }
else if (type_name->Equals(heap()->object_symbol())) {
4687 __ JumpIfSmi(input, false_label);
4688 if (!FLAG_harmony_typeof) {
4689 __ CompareRoot(input, Heap::kNullValueRootIndex);
4699 final_branch_condition =
zero;
4702 __ jmp(false_label);
4705 return final_branch_condition;
4709 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4710 Register temp =
ToRegister(instr->TempAt(0));
4711 int true_block = chunk_->LookupDestination(instr->true_block_id());
4712 int false_block = chunk_->LookupDestination(instr->false_block_id());
4714 EmitIsConstructCall(temp);
4715 EmitBranch(true_block, false_block,
equal);
4719 void LCodeGen::EmitIsConstructCall(Register temp) {
4724 Label check_frame_marker;
4727 __ j(
not_equal, &check_frame_marker, Label::kNear);
4731 __ bind(&check_frame_marker);
4737 void LCodeGen::EnsureSpaceForLazyDeopt(
int space_needed) {
4740 int current_pc = masm()->pc_offset();
4741 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
4742 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
4743 __ Nop(padding_size);
4748 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4750 last_lazy_deopt_pc_ = masm()->pc_offset();
4751 ASSERT(instr->HasEnvironment());
4752 LEnvironment* env = instr->environment();
4753 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4754 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4758 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4763 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4764 LOperand* obj = instr->object();
4765 LOperand* key = instr->key();
4766 EmitPushTaggedOperand(obj);
4767 EmitPushTaggedOperand(key);
4768 ASSERT(instr->HasPointerMap());
4769 LPointerMap* pointers = instr->pointer_map();
4770 RecordPosition(pointers->position());
4774 SafepointGenerator safepoint_generator(
4775 this, pointers, Safepoint::kLazyDeopt);
4777 __ InvokeBuiltin(Builtins::DELETE,
CALL_FUNCTION, safepoint_generator);
4781 void LCodeGen::DoIn(LIn* instr) {
4782 LOperand* obj = instr->object();
4783 LOperand* key = instr->key();
4784 EmitPushTaggedOperand(key);
4785 EmitPushTaggedOperand(obj);
4786 ASSERT(instr->HasPointerMap());
4787 LPointerMap* pointers = instr->pointer_map();
4788 RecordPosition(pointers->position());
4789 SafepointGenerator safepoint_generator(
4790 this, pointers, Safepoint::kLazyDeopt);
4795 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4796 PushSafepointRegistersScope scope(
this);
4798 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4799 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
4800 ASSERT(instr->HasEnvironment());
4801 LEnvironment* env = instr->environment();
4802 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4806 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4807 class DeferredStackCheck:
public LDeferredCode {
4809 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4810 : LDeferredCode(codegen), instr_(instr) { }
4811 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4812 virtual LInstruction* instr() {
return instr_; }
4814 LStackCheck* instr_;
4817 ASSERT(instr->HasEnvironment());
4818 LEnvironment* env = instr->environment();
4821 if (instr->hydrogen()->is_function_entry()) {
4824 __ CompareRoot(
rsp, Heap::kStackLimitRootIndex);
4826 StackCheckStub stub;
4827 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4829 last_lazy_deopt_pc_ = masm()->pc_offset();
4831 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4832 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4834 ASSERT(instr->hydrogen()->is_backwards_branch());
4836 DeferredStackCheck* deferred_stack_check =
4837 new(zone()) DeferredStackCheck(
this, instr);
4838 __ CompareRoot(
rsp, Heap::kStackLimitRootIndex);
4839 __ j(
below, deferred_stack_check->entry());
4841 last_lazy_deopt_pc_ = masm()->pc_offset();
4842 __ bind(instr->done_label());
4843 deferred_stack_check->SetExit(instr->done_label());
4844 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4852 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4856 LEnvironment* environment = instr->environment();
4857 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4858 instr->SpilledDoubleRegisterArray());
4862 ASSERT(!environment->HasBeenRegistered());
4863 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
4864 ASSERT(osr_pc_offset_ == -1);
4865 osr_pc_offset_ = masm()->pc_offset();
4869 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
4870 __ CompareRoot(
rax, Heap::kUndefinedValueRootIndex);
4871 DeoptimizeIf(
equal, instr->environment());
4873 Register null_value =
rdi;
4874 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
4875 __ cmpq(
rax, null_value);
4876 DeoptimizeIf(
equal, instr->environment());
4879 DeoptimizeIf(cc, instr->environment());
4885 Label use_cache, call_runtime;
4886 __ CheckEnumCache(null_value, &call_runtime);
4889 __ jmp(&use_cache, Label::kNear);
4892 __ bind(&call_runtime);
4894 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
4897 Heap::kMetaMapRootIndex);
4898 DeoptimizeIf(
not_equal, instr->environment());
4899 __ bind(&use_cache);
4903 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
4905 Register result =
ToRegister(instr->result());
4906 __ LoadInstanceDescriptors(map, result);
4911 Condition cc = masm()->CheckSmi(result);
4912 DeoptimizeIf(cc, instr->environment());
4916 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
4917 Register
object =
ToRegister(instr->value());
4920 DeoptimizeIf(
not_equal, instr->environment());
4924 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
4925 Register
object =
ToRegister(instr->object());
4928 Label out_of_object, done;
4929 __ SmiToInteger32(index, index);
4930 __ cmpl(index, Immediate(0));
4931 __ j(
less, &out_of_object);
4936 __ jmp(&done, Label::kNear);
4938 __ bind(&out_of_object);
4945 FixedArray::kHeaderSize - kPointerSize));
4954 #endif // V8_TARGET_ARCH_X64
static const int kCallerFPOffset
static const int kElementsKindMask
static const int kLengthOffset
static const int kBitFieldOffset
static LGap * cast(LInstruction *instr)
const intptr_t kSmiTagMask
static const int kCodeEntryOffset
static const int kMaxAsciiCharCode
static const int kPrototypeOrInitialMapOffset
const char * ToCString(const v8::String::Utf8Value &value)
static int SlotOffset(int index)
virtual void AfterCall() const
void PrintF(const char *format,...)
static Smi * FromInt(int value)
bool IsFastObjectElementsKind(ElementsKind kind)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
static HeapObject * cast(Object *obj)
static Handle< T > cast(Handle< S > that)
static const int kGlobalReceiverOffset
static const int kNativeByteOffset
static XMMRegister FromAllocationIndex(int index)
static bool IsSupported(CpuFeature f)
static const int kStrictModeBitWithinByte
static const int kExternalPointerOffset
virtual ~SafepointGenerator()
static const int kCallerSPOffset
#define ASSERT(condition)
bool CanTransitionToMoreGeneralFastElementsKind(ElementsKind elements_kind, bool allow_only_packed)
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static const int kInObjectFieldCount
static const int kMaximumSlots
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
static const int kUnusedPropertyFieldsOffset
static const int kGlobalContextOffset
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
MemOperand ContextOperand(Register context, int index)
static const int kContextOffset
Handle< String > SubString(Handle< String > str, int start, int end, PretenureFlag pretenure)
static const int kHashFieldOffset
Condition ReverseCondition(Condition cond)
const uint32_t kSlotsZapValue
static const int kLiteralsOffset
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
static const int kValueOffset
const uint32_t kHoleNanUpper32
Operand FieldOperand(Register object, int offset)
static LConstantOperand * cast(LOperand *op)
const uint32_t kHoleNanLower32
static Register FromAllocationIndex(int index)
static void VPrint(const char *format, va_list args)
static const int kCacheStampOffset
static const int kPropertiesOffset
static const int kInObjectPropertiesOffset
bool IsFastSmiElementsKind(ElementsKind kind)
static int OffsetOfElementAt(int index)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const int kElementsOffset
static const int kNativeBitWithinByte
static const int kContainsCachedArrayIndexMask
static Vector< T > New(int length)
int ElementsKindToShiftSize(ElementsKind elements_kind)
Vector< const char > CStrVector(const char *data)
static int OffsetOfElementAt(int index)
static const int kLengthOffset
static int SizeFor(int length)
static const int kHeaderSize
static const int kEnumerationIndexOffset
static const int kMapOffset
static const int kValueOffset
bool is(Register reg) const
static const int kLengthOffset
static Address GetDeoptimizationEntry(int id, BailoutType type)
static const int kHasNonInstancePrototype
const Register kScratchRegister
static const int kContextOffset
static const int kFunctionOffset
ElementsKind GetInitialFastElementsKind()
static const uint32_t kSignMask
static const int kStrictModeByteOffset
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static const int kElementsKindShift
static const int kConstructorOffset
static double canonical_not_the_hole_nan_as_double()
#define ASSERT_NE(v1, v2)
static const int kIsUndetectable
static const int kHeaderSize
static const int kMaximumClonedProperties
static const int kPrototypeOffset
#define RUNTIME_ENTRY(name, nargs, ressize)
static const int kMaxLength
static const int kValueOffset
static const int kMarkerOffset
static const int kHashShift
static const int kSharedFunctionInfoOffset
Register ToRegister(int num)
static const int kMaxValue
static const int kBitField2Offset
static HValue * cast(HValue *value)
static Handle< Code > GetUninitialized(Token::Value op)
static const int kMaximumClonedLength
static const int kExponentOffset
bool EvalComparison(Token::Value op, double op1, double op2)
static JSObject * cast(Object *obj)
bool IsFastDoubleElementsKind(ElementsKind kind)
SafepointGenerator(LCodeGen *codegen, LPointerMap *pointers, Safepoint::DeoptMode mode)
static const int kInstanceTypeOffset
virtual void BeforeCall(int call_size) const
static const int kPreAllocatedPropertyFieldsOffset