#if V8_TARGET_ARCH_X64

    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
static const int kInvalidRootRegisterDelta = -1;

intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
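// On x64 the root array pointer lives in a dedicated register (kRootRegister),
// so external references close to the isolate can be addressed as a 32-bit
// displacement off that register instead of materializing a full 64-bit
// address. RootRegisterDelta() computes that displacement; it returns
// kInvalidRootRegisterDelta when predictable code size is requested and the
// reference lies outside the isolate object itself.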
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
  Move(scratch, target);
  return Operand(scratch, 0);

void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
  if (destination.is(rax)) {

void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
  if (source.is(rax)) {
    store_rax(destination);

void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
  Move(destination, source);

int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      if (!is_int8(static_cast<int32_t>(delta))) {
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;

void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !Serializer::enabled()) {
    if (emit_debug_code()) {
    Push(Immediate(static_cast<int32_t>(address)));
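// When the external address fits in 32 bits (and we are not serializing),
// PushAddress() can push it as an immediate; otherwise the full 64-bit value
// has to be materialized in a register first (that path is not shown in this
// excerpt).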
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
  ASSERT(root_array_available_);

void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::RememberedSetHelper(Register object,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  movp(Operand(scratch, 0), addr);
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    bind(&buffer_overflowed);
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
  ASSERT(and_then == kFallThroughAtEnd);
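// RememberedSetHelper() records a slot address in the store buffer used by
// the write barrier: it loads the current store-buffer top, writes the slot
// address there, stores the bumped top back, and tests the overflow bit.
// When the buffer is full it calls the StoreBufferOverflowStub; depending on
// 'and_then' it then either returns directly or falls through.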
void MacroAssembler::InNewSpace(Register object,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    if (scratch.is(object)) {
    Move(scratch, ExternalReference::new_space_mask(isolate()));
    andp(scratch, object);
    j(cc, branch, distance);
    ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
         Assembler::RelocInfoNone());
    if (scratch.is(object)) {
         Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
    j(cc, branch, distance);

void MacroAssembler::RecordWriteField(
  JumpIfSmi(value, &done);
  if (emit_debug_code()) {
    j(zero, &ok, Label::kNear);
  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());

void MacroAssembler::RecordWriteArray(Register object,
  JumpIfSmi(value, &done);
  Register dst = index;
  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());

void MacroAssembler::RecordWrite(Register object,
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);
      !FLAG_incremental_marking) {
  if (emit_debug_code()) {
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
  JumpIfSmi(value, &done);
  CheckPageFlag(value,
                MemoryChunk::kPointersToHereAreInterestingMask,
  CheckPageFlag(object,
                MemoryChunk::kPointersFromHereAreInterestingMask,
  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
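// RecordWrite() is the generic write barrier: it skips Smi values, tests the
// page flags of both the stored value and the host object, and only calls the
// RecordWriteStub when the store is actually interesting to the GC. In debug
// builds the clobbered registers are zapped afterwards so stale values cannot
// be reused by accident.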
  if (emit_debug_code()) Check(cc, reason);

void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);

  j(cc, &L, Label::kNear);

void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    bind(&alignment_as_expected);

void MacroAssembler::NegativeZeroTest(Register result,
  testl(result, result);

  RecordComment("Abort message: ");
  if (FLAG_trap_on_abort) {
       Assembler::RelocInfoNone());
    CallRuntime(Runtime::kAbort, 1);
    CallRuntime(Runtime::kAbort, 1);

void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));
  Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);

void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);

void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());

bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();

void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);

void MacroAssembler::IndexFromHash(Register hash, Register index) {
         (1 << String::kArrayIndexValueBits));
  andp(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  Integer32ToSmi(index, hash);

void MacroAssembler::CallRuntime(const Runtime::Function* f,
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size, save_doubles);

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
  TailCallExternalReference(ExternalReference(fid, isolate()),

static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
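// The runtime calling convention used here: rax carries the argument count
// and rbx the entry address of the C function, then CEntryStub builds the
// exit frame and dispatches. Offset() turns the distance between two external
// references into an int, which CallApiFunctionAndReturn() below uses to
// address the HandleScope fields relative to a single base register.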
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
  EnterApiExitFrame(arg_stack_space);

void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    Register thunk_last_arg,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate()),
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate()),
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  Move(base_reg, next_address);
  movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();

  Label profiler_disabled;
  Label end_profiler_check;
  bool* is_profiling_flag =
      isolate()->cpu_profiler()->is_profiling_address();
  Move(rax, is_profiling_flag, RelocInfo::EXTERNAL_REFERENCE);
  cmpb(Operand(rax, 0), Immediate(0));
  j(zero, &profiler_disabled);

  Move(thunk_last_arg, function_address);
  Move(rax, thunk_address, RelocInfo::EXTERNAL_REFERENCE);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  Move(rax, function_address);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();

  movp(rax, return_value_operand);

  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  bind(&leave_exit_frame);

  Move(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  Register return_value = rax;
  JumpIfSmi(return_value, &ok, Label::kNear);
  movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
  j(below, &ok, Label::kNear);
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);
  Abort(kAPICallReturnedInvalidObject);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    movp(rsi, *context_restore_operand);
  LeaveApiExitFrame(!restore_context);

  bind(&promote_scheduled_exception);
  CallRuntime(Runtime::kHiddenPromoteScheduledException, 0);
  jmp(&exception_handled);

  bind(&delete_allocated_handles);
  movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movp(prev_limit_reg, rax);
  LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
      ExternalReference::delete_handle_scope_extensions(isolate()));
  movp(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
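// CallApiFunctionAndReturn() brackets the C++ callback with HandleScope
// bookkeeping: it saves next/limit and bumps the level before the call, and
// afterwards restores them, deleting any extensions the callback allocated
// (the delete_allocated_handles path). It also re-dispatches through a
// profiling thunk when the CPU profiler is active and checks the scheduled
// exception slot before returning to generated code.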
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);

void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   const CallWrapper& call_wrapper) {
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  movp(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
                       JSBuiltinsObject::OffsetOfFunctionWithId(id)));

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  GetBuiltinFunction(rdi, id);

#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

                                     Register exclusion3) {
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);

                                    Register exclusion3) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
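// These two loops appear to be the caller-saved save/restore helpers: the
// general purpose registers from saved_regs[] are pushed (skipping up to
// three excluded registers), optionally followed by a block of stack space
// for all XMM registers; the restore side walks the same lists in reverse
// order so the stack layout matches.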
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {

void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {

  if (r.IsInteger8()) {
  } else if (r.IsUInteger8()) {
  } else if (r.IsInteger16()) {
  } else if (r.IsUInteger16()) {
  } else if (r.IsInteger32()) {

void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  if (r.IsInteger8() || r.IsUInteger8()) {
  } else if (r.IsInteger16() || r.IsUInteger16()) {
  } else if (r.IsInteger32()) {

void MacroAssembler::Set(Register dst, int64_t x) {
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));

void MacroAssembler::Set(const Operand& dst, intptr_t x) {
    movp(dst, Immediate(static_cast<int32_t>(x)));
    movp(dst, Immediate(static_cast<int32_t>(x)));

bool MacroAssembler::IsUnsafeInt(const int32_t x) {
  static const int kMaxBits = 17;

void MacroAssembler::SafeMove(Register dst, Smi* src) {
  ASSERT(SmiValuesAre32Bits());
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));

void MacroAssembler::SafePush(Smi* src) {
  ASSERT(SmiValuesAre32Bits());
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Push(Smi::FromInt(src->value() ^ jit_cookie()));

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
         Assembler::RelocInfoNone());
    Assert(equal, kUninitializedKSmiConstantRegister);
  int value = source->value();
  unsigned int uvalue = negative ? -value : value;
  Move(dst, source, Assembler::RelocInfoNone());
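// SafeMove()/SafePush() XOR "unsafe" immediates with the per-isolate jit
// cookie so attacker-chosen constants never appear verbatim in the code
// stream; a follow-up instruction (not visible in this excerpt) undoes the
// XOR at run time. LoadSmiConstant() appears to derive small Smi constants
// from kSmiConstantRegister before falling back to a full 64-bit move.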
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {

void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);

void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
    addl(dst, Immediate(constant));
    leal(dst, Operand(src, constant));

void MacroAssembler::SmiToInteger32(Register dst, Register src) {

void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {

void MacroAssembler::SmiToInteger64(Register dst, Register src) {

void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {

void MacroAssembler::SmiTest(Register src) {

void MacroAssembler::SmiCompare(Register smi1, Register smi2) {

void MacroAssembler::SmiCompare(Register dst, Smi* src) {

void MacroAssembler::Cmp(Register dst, Smi* src) {
  if (src->value() == 0) {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);

void MacroAssembler::SmiCompare(Register dst, const Operand& src) {

void MacroAssembler::SmiCompare(const Operand& dst, Register src) {

void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {

void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));

void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {

void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
  SmiToInteger64(dst, src);

void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
  ASSERT((0 <= power) && (power < 32));

void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
  JumpIfNotSmi(dst, on_not_smis, near_jump);

Condition MacroAssembler::CheckSmi(Register src) {

Condition MacroAssembler::CheckSmi(const Operand& src) {

Condition MacroAssembler::CheckNonNegativeSmi(Register src) {

Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);

Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);

Condition MacroAssembler::CheckEitherSmi(Register first,
  if (first.is(second)) {
    return CheckSmi(first);
  if (scratch.is(second)) {
    andl(scratch, first);
  if (!scratch.is(first)) {
    movl(scratch, first);
  andl(scratch, second);

Condition MacroAssembler::CheckIsMinSmi(Register src) {

Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {

Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {

void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {

void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {

void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);

void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);

void MacroAssembler::JumpIfSmi(Register src,
                               Label::Distance near_jump) {
  j(smi, on_smi, near_jump);

void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label::Distance near_jump) {

void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);

void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);

void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);

void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);

void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    switch (constant->value()) {
    Register constant_reg = GetSmiConstant(constant);
    addp(dst, constant_reg);
    switch (constant->value()) {
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {

void MacroAssembler::SmiAddConstant(Register dst,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
      jmp(bailout_label, near_jump);
      j(overflow, bailout_label, near_jump);
      CHECK(mode.IsEmpty());
    LoadSmiConstant(dst, constant);
    j(overflow, bailout_label, near_jump);

void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));

void MacroAssembler::SmiSubConstant(Register dst,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
      jmp(bailout_label, near_jump);
      j(overflow, bailout_label, near_jump);
      CHECK(mode.IsEmpty());
    if (constant->value() == Smi::kMinValue) {
      j(overflow, bailout_label, near_jump);
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      j(overflow, bailout_label, near_jump);

void MacroAssembler::SmiNeg(Register dst,
                            Label* on_smi_result,
                            Label::Distance near_jump) {

static void SmiAddHelper(MacroAssembler* masm,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
    masm->addp(dst, src2);
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
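// The add/sub helpers exploit the fact that tagged Smis occupy the full
// register: when dst aliases src1 the helper performs the operation and, on
// overflow, undoes it (subp/addp with src2) before jumping to the bailout, so
// the original Smi is still intact on the slow path. When dst is a separate
// register it can simply jump on overflow.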
void MacroAssembler::SmiAdd(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);

void MacroAssembler::SmiAdd(Register dst,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);

void MacroAssembler::SmiAdd(Register dst,
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
  leap(dst, Operand(src1, src2, times_1, 0));

static void SmiSubHelper(MacroAssembler* masm,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
    masm->subp(dst, src2);
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiSub(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);

void MacroAssembler::SmiSub(Register dst,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);

static void SmiSubNoOverflowHelper(MacroAssembler* masm,
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);

void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);

void MacroAssembler::SmiSub(Register dst,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);

void MacroAssembler::SmiMul(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
    Label failure, zero_correct_result;
    SmiToInteger64(dst, src1);
    j(overflow, &failure, Label::kNear);
    Label correct_result;
    j(not_zero, &correct_result, Label::kNear);
    j(positive, &zero_correct_result, Label::kNear);
    jmp(on_not_smi_result, near_jump);
    bind(&zero_correct_result);
    bind(&correct_result);
    SmiToInteger64(dst, src1);
    j(overflow, on_not_smi_result, near_jump);
    Label correct_result;
    j(not_zero, &correct_result, Label::kNear);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);

void MacroAssembler::SmiDiv(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(zero, on_not_smi_result, near_jump);
  SmiToInteger32(rax, src1);
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  j(positive, &safe_div, Label::kNear);
  jmp(on_not_smi_result, near_jump);
  j(negative, on_not_smi_result, near_jump);
  SmiToInteger32(src2, src2);
  Integer32ToSmi(src2, src2);
  j(zero, &smi_result, Label::kNear);
  jmp(on_not_smi_result, near_jump);
  j(not_zero, on_not_smi_result, near_jump);
  if (!dst.is(src1) && src1.is(rax)) {
  Integer32ToSmi(dst, rax);

void MacroAssembler::SmiMod(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(zero, on_not_smi_result, near_jump);
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);
  cmpl(rax, Immediate(Smi::kMinValue));
  cmpl(src2, Immediate(-1));
  Integer32ToSmi(src2, src2);
  jmp(on_not_smi_result, near_jump);
  Integer32ToSmi(src2, src2);
  j(not_zero, &smi_result, Label::kNear);
  j(negative, on_not_smi_result, near_jump);
  Integer32ToSmi(dst, rdx);

void MacroAssembler::SmiNot(Register dst, Register src) {

void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    sar(dst, Immediate(shift_value + kSmiShift));

void MacroAssembler::SmiShiftLeftConstant(Register dst,
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));

void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  if (shift_value == 0) {
    j(negative, on_not_smi_result, near_jump);
    shr(dst, Immediate(shift_value + kSmiShift));

void MacroAssembler::SmiShiftLeft(Register dst,
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  andq(rcx, Immediate(0x1f));

void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (src1.is(rcx) || src2.is(rcx)) {
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
    j(negative, on_not_smi_result, near_jump);

void MacroAssembler::SmiShiftArithmeticRight(Register dst,
  } else if (src2.is(rcx)) {
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  } else if (src2.is(rcx)) {

void MacroAssembler::SelectNonSmi(Register dst,
                                  Label::Distance near_jump) {
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
  j(not_zero, on_not_smis, near_jump);

SmiIndex MacroAssembler::SmiToIndex(Register dst,
  return SmiIndex(dst, times_1);

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
  return SmiIndex(dst, times_1);

void MacroAssembler::AddSmiField(Register dst, const Operand& src) {

void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
    Register constant = GetSmiConstant(source);

void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) {

void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) {

void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
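// With 32-bit Smi values the payload lives in the upper half of the 64-bit
// word, so Test() can compare just the high 32 bits: it adds kIntSize to the
// operand's displacement and uses a 32-bit testl against the raw value.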
void MacroAssembler::LookupNumberStringCache(Register object,
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shrl(mask, Immediate(1));
  subp(mask, Immediate(1));

  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
           isolate()->factory()->heap_number_map(),
  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  xorp(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  andp(scratch, mask);
  Register index = scratch;
  Register probe = mask;
                    FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  jmp(&load_result_from_cache);

  SmiToInteger32(scratch, object);
  andp(scratch, mask);
                    FixedArray::kHeaderSize));

  bind(&load_result_from_cache);
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);

void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label::Distance near_jump) {
  j(is_smi, not_string, near_jump);

void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Label::Distance near_jump) {
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  const int kFlatAsciiStringMask =
  const int kFlatAsciiStringTag =
  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));

void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  const int kFlatAsciiStringMask =
  andl(scratch, Immediate(kFlatAsciiStringMask));

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Label::Distance near_jump) {
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  const int kFlatAsciiStringMask =
  const int kFlatAsciiStringTag =
  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
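// Both "sequential ASCII" checks fold two instance-type tests into a single
// compare: after masking, the two types are combined as scratch1 + scratch2*8
// (the leap with times_8) and compared against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3). The ASSERT_EQ above
// guarantees the shifted mask cannot overlap, so the combined compare is
// exact.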
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  masm->testb(operand_or_register,
  masm->j(zero, &succeed, Label::kNear);
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);
  masm->bind(&succeed);

void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);

void MacroAssembler::JumpIfNotUniqueName(Register reg,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);

void MacroAssembler::Move(Register dst, Register src) {

void MacroAssembler::Move(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
    MoveHeapObject(dst, source);

void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));

void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));

void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));

void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));

void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  ASSERT(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);

void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
    load_rax(cell.location(), RelocInfo::CELL);
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));

void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {

void MacroAssembler::Push(Register src) {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);

void MacroAssembler::Push(const Operand& src) {
    leal(rsp, Operand(rsp, -4));

void MacroAssembler::Push(Immediate value) {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);

void MacroAssembler::PushImm32(int32_t imm32) {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));

void MacroAssembler::Pop(Register dst) {
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));

void MacroAssembler::Pop(const Operand& dst) {
    movp(scratch, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
         Assembler::RelocInfoNone());

void MacroAssembler::TestBit(const Operand& src, int bits) {
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
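// The Push/Pop variants above have two code paths (only the second one is
// visible in this excerpt): with 8-byte pointers they use the plain hardware
// push/pop, while with 4-byte pointers they adjust rsp by 4 manually via leal
// and move the value themselves, because the hardware push would always move
// 8 bytes.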
void MacroAssembler::Jump(ExternalReference ext) {

void MacroAssembler::Jump(const Operand& op) {

void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {

void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  jmp(code_object, rmode);

int MacroAssembler::CallSize(ExternalReference ext) {
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;

void MacroAssembler::Call(ExternalReference ext) {
  int end_position = pc_offset() + CallSize(ext);
  CHECK_EQ(end_position, pc_offset());

void MacroAssembler::Call(const Operand& op) {

void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  int end_position = pc_offset() + CallSize(destination);
  CHECK_EQ(pc_offset(), end_position);

void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
  int end_position = pc_offset() + CallSize(code_object);
  ASSERT(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
  CHECK_EQ(end_position, pc_offset());

void MacroAssembler::Pushad() {
  leap(rsp, Operand(rsp, -sp_delta));

void MacroAssembler::Popad() {
  leap(rsp, Operand(rsp, sp_delta));

void MacroAssembler::Dropad() {

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);

void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));

Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);

void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  if (kind == StackHandler::JS_ENTRY) {
    pushq(Immediate(0));
    Push(Smi::FromInt(0));
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  Push(Immediate(state));

  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));
  movp(ExternalOperand(handler_address), rsp);

void MacroAssembler::PopTryHandler() {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Pop(ExternalOperand(handler_address));

void MacroAssembler::JumpToHandlerEntry() {
  shr(rdx, Immediate(StackHandler::kKindWidth));
  SmiToInteger64(rdx, rdx);

void MacroAssembler::Throw(Register value) {
  if (!value.is(rax)) {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movp(rsp, ExternalOperand(handler_address));
  Pop(ExternalOperand(handler_address));
  j(zero, &skip, Label::kNear);
  movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  JumpToHandlerEntry();

void MacroAssembler::ThrowUncatchable(Register value) {
  if (!value.is(rax)) {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  Pop(ExternalOperand(handler_address));
  JumpToHandlerEntry();
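// Throw() unwinds to the innermost handler: rsp is pointed at the handler
// recorded in Isolate::kHandlerAddress, the link is popped back into that
// slot, and control transfers via JumpToHandlerEntry(). ThrowUncatchable()
// instead walks the handler chain (the fetch_next/check_kind loop, testing
// the handler's kind field) until it reaches a JS entry handler, since
// ordinary catch handlers must not observe an uncatchable exception.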
void MacroAssembler::Ret() {

void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);

void MacroAssembler::FCmp() {

void MacroAssembler::CmpObjectType(Register heap_object,
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);

void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
        Immediate(static_cast<int8_t>(type)));

void MacroAssembler::CheckFastElements(Register map,
                                       Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);

void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
        Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);

void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);

void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    XMMRegister xmm_scratch,
    int elements_offset) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
                 FixedDoubleArray::kHeaderSize - elements_offset),
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
         FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  jmp(&have_double_value, Label::kNear);
                 FixedDoubleArray::kHeaderSize - elements_offset),

void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {

void MacroAssembler::CheckMap(Register obj,
    JumpIfSmi(obj, fail);
  CompareMap(obj, map);

void MacroAssembler::ClampUint8(Register reg) {
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);

void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  xorps(temp_xmm_reg, temp_xmm_reg);
  cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
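// ClampDoubleToUint8() converts with cvtsd2si and keeps the result directly
// when it already fits in a byte; out-of-range values are forced to 0 or 255
// (the setcc/sub/and sequence and the 255 fallback after conv_failure), so
// the result is always a valid uint8 even for NaN or overflowing inputs.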
void MacroAssembler::LoadUint32(XMMRegister dst,
                                XMMRegister scratch) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  cvtqsi2sd(dst, src);

void MacroAssembler::SlowTruncateToI(Register result_reg,
  DoubleToIStub stub(input_reg, result_reg, offset, true);
  call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);

void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  cvttsd2siq(result_reg, xmm0);
  cmpq(result_reg, Immediate(1));
  if (input_reg.is(result_reg)) {
    SlowTruncateToI(result_reg, rsp, 0);
    SlowTruncateToI(result_reg, input_reg);

void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  cvttsd2siq(result_reg, input_reg);
  cmpq(result_reg, Immediate(1));
  SlowTruncateToI(result_reg, rsp, 0);
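// The fast path truncates with cvttsd2siq and then does cmpq(result, 1): a
// failed conversion yields the INT64_MIN sentinel, for which the subtraction
// overflows, so the overflow flag (tested in code not visible in this
// excerpt) routes those cases to SlowTruncateToI() and the DoubleToIStub.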
void MacroAssembler::DoubleToI(Register result_reg,
                               XMMRegister input_reg,
                               XMMRegister scratch,
                               Label* conversion_failed,
                               Label::Distance dst) {
  cvttsd2si(result_reg, input_reg);
  Cvtlsi2sd(xmm0, result_reg);
  ucomisd(xmm0, input_reg);
  testl(result_reg, result_reg);
  movmskpd(result_reg, input_reg);
  andl(result_reg, Immediate(1));
  j(not_zero, conversion_failed, dst);

void MacroAssembler::TaggedToI(Register result_reg,
                               Label* lost_precision,
                               Label::Distance dst) {
  CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
              Heap::kHeapNumberMapRootIndex);
  cvttsd2si(result_reg, xmm0);
  Cvtlsi2sd(temp, result_reg);
  ucomisd(xmm0, temp);
  RecordComment("Deferred TaggedToI: lost precision");
  RecordComment("Deferred TaggedToI: NaN");
  testl(result_reg, result_reg);
  movmskpd(result_reg, xmm0);
  andl(result_reg, Immediate(1));

  RecordComment("Throw message: ");
  Push(Smi::FromInt(reason));
    CallRuntime(Runtime::kHiddenThrowMessage, 1);
    CallRuntime(Runtime::kHiddenThrowMessage, 1);

void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));

void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);

void MacroAssembler::EnumLength(Register dst, Register map) {

void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Code> success,
  JumpIfSmi(obj, &fail);
  j(equal, success, RelocInfo::CODE_TARGET);

void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    j(is_smi, &ok, Label::kNear);
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Check(is_smi, kOperandIsNotASmi);

void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Check(is_smi, kOperandIsNotASmi);

void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);

void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    Check(not_equal, kOperandIsASmiAndNotAString);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    Check(below, kOperandIsNotAString);

void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    Check(not_equal, kOperandIsASmiAndNotAName);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);

void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
  if (emit_debug_code()) {
    Check(equal, reason);

Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));

Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));

void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             bool miss_on_bound_function) {
  if (miss_on_bound_function) {
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
                  SharedFunctionInfo::kCompilerHintsOffset),
              SharedFunctionInfo::kBoundFunction);
           Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, Label::kNear);
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  movp(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done, Label::kNear);

  bind(&non_instance);
  movp(result, FieldOperand(result, Map::kConstructorOffset));

void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
      incl(counter_operand);
      addl(counter_operand, Immediate(value));

void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
      decl(counter_operand);
      subl(counter_operand, Immediate(value));

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  ASSERT(AllowThisStubCall(&ces));
#endif  // ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                const CallWrapper& call_wrapper) {
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 Handle<Code>::null(),
                 &definitely_mismatches,
  if (!definitely_mismatches) {
      call_wrapper.BeforeCall(CallSize(code));
      call_wrapper.AfterCall();

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper);

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  InvokeCode(rdx, expected, actual, flag, call_wrapper);

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, expected, actual, flag, call_wrapper);

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    bool* definitely_mismatches,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        definitely_matches = true;
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
    if (actual.is_immediate()) {
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
    } else if (!code_register.is(rdx)) {
      movp(rdx, code_register);
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      Jump(adaptor, RelocInfo::CODE_TARGET);
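// InvokePrologue() compares the expected and actual argument counts. When
// they can be proven equal at compile time nothing extra is emitted; when a
// mismatch is possible the ArgumentsAdaptorTrampoline is called (or jumped to
// for a proven mismatch) with rax holding the actual and rbx the expected
// count, and rdx holding the code object to invoke afterwards.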
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (isolate()->IsCodePreAgingActive()) {
    Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);

  Push(Smi::FromInt(type));
  if (emit_debug_code()) {
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    Check(not_equal, kCodeObjectNotProperlyPatched);

  if (emit_debug_code()) {
    Check(equal, kStackFrameTypesMustMatch);

void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  ASSERT(ExitFrameConstants::kCallerSPDisplacement ==
  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);

void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
    int space = XMMRegister::kMaxNumAllocatableRegisters * kDoubleSize +
    subp(rsp, Immediate(space));
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));

  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));

  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);

void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  EnterExitFrameEpilogue(arg_stack_space, save_doubles);

void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);

void MacroAssembler::LeaveExitFrame(bool save_doubles) {
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
  movp(rbp, Operand(rbp, 0 * kPointerSize));
  leap(rsp, Operand(r15, 1 * kPointerSize));
  PushReturnAddressFrom(rcx);
  LeaveExitFrameEpilogue(true);

void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  LeaveExitFrameEpilogue(restore_context);

void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  movp(context_operand, Immediate(0));

  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
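// Exit frames bridge generated code and C++. The prologue publishes rbp and
// rsi to Isolate::kCEntryFPAddress / kContextAddress, the epilogue reserves
// (and aligns) space for C arguments plus optional XMM register spills, and
// LeaveExitFrameEpilogue() restores the context slot and clears the C entry
// frame pointer so the stack walker no longer sees this frame.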
3922 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
3925 Label same_contexts;
3927 ASSERT(!holder_reg.is(scratch));
3930 movp(scratch, Operand(
rbp, StandardFrameConstants::kContextOffset));
3933 if (emit_debug_code()) {
3934 cmpp(scratch, Immediate(0));
3935 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
3939 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3941 movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
3944 if (emit_debug_code()) {
3946 isolate()->factory()->native_context_map());
3947 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3951 cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3952 j(equal, &same_contexts);
3960 if (emit_debug_code()) {
3964 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3965 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3966 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
3969 movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3970 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
3971 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3976 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3978 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
3983 bind(&same_contexts);
3990 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
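// Note (added): the shift/lea/imul lines below are the seeded integer hash
// used for seeded number dictionaries; the key is first xor'ed with the
// heap's hash seed and then mixed. The interleaved movl/xorl/notl/addl steps
// appear to have been dropped from this listing, so only the shift and
// multiply lines remain.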
3992 LoadRoot(scratch, Heap::kHashSeedRootIndex);
3993 SmiToInteger32(scratch, scratch);
4004 shll(scratch, Immediate(15));
4008 shrl(scratch, Immediate(12));
4011 leal(r0, Operand(r0, r0, times_4, 0));
4014 shrl(scratch, Immediate(4));
4017 imull(r0, r0, Immediate(2057));
4020 shrl(scratch, Immediate(16));
4026 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
4056 GetNumberHash(r0, r1);
4060 SeededNumberDictionary::kCapacityOffset));
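// Note (added): the probe loop below performs up to kNumberDictionaryProbes
// probes of the open-addressed dictionary: each probe adds a precomputed
// offset to the hash, masks it with (capacity - 1), and scales the entry
// index by kEntrySize == 3 via the lea; the lines that reload the hash and
// apply the capacity mask are elided in this listing.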
4064 for (int i = 0; i < kNumberDictionaryProbes; i++) {
4069 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
4074 ASSERT(SeededNumberDictionary::kEntrySize == 3);
4075 leap(r2, Operand(r2, r2, times_2, 0));
4081 SeededNumberDictionary::kElementsStartOffset));
4082 if (i != (kNumberDictionaryProbes - 1)) {
4091 const int kDetailsOffset =
4092 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
4095 Smi::FromInt(PropertyDetails::TypeField::kMask));
4099 const int kValueOffset =
4100 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
4105 void MacroAssembler::LoadAllocationTopHelper(Register result,
4108 ExternalReference allocation_top =
4109 AllocationUtils::GetAllocationTopReference(isolate(), flags);
4114 ASSERT(!scratch.is_valid());
4117 Operand top_operand = ExternalOperand(allocation_top);
4118 cmpp(result, top_operand);
4119 Check(equal, kUnexpectedAllocationTop);
4126 if (scratch.is_valid()) {
4127 LoadAddress(scratch, allocation_top);
4128 movp(result, Operand(scratch, 0));
4130 Load(result, allocation_top);
4135 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
4138 if (emit_debug_code()) {
4140 Check(zero, kUnalignedAllocationInNewSpace);
4143 ExternalReference allocation_top =
4144 AllocationUtils::GetAllocationTopReference(isolate(), flags);
4147 if (scratch.is_valid()) {
4149 movp(Operand(scratch, 0), result_end);
4151 Store(allocation_top, result_end);
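// Note (added): Allocate() below is the inline bump-pointer fast path for
// new-space allocation: load the current allocation top, add the object size,
// bail out to gc_required on overflow or when the allocation limit is
// exceeded, write the new top back, and optionally tag the result as a heap
// object.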
4156 void MacroAssembler::Allocate(int object_size,
4158 Register result_end,
4163 ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
4164 if (!FLAG_inline_new) {
4165 if (emit_debug_code()) {
4167 movl(result, Immediate(0x7091));
4168 if (result_end.is_valid()) {
4169 movl(result_end, Immediate(0x7191));
4171 if (scratch.is_valid()) {
4172 movl(scratch, Immediate(0x7291));
4178 ASSERT(!result.is(result_end));
4181 LoadAllocationTopHelper(result, scratch, flags);
4187 Check(zero, kAllocationIsNotDoubleAligned);
4191 ExternalReference allocation_limit =
4192 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4194 Register top_reg = result_end.is_valid() ? result_end : result;
4196 if (!top_reg.is(result)) {
4197 movp(top_reg, result);
4199 addp(top_reg, Immediate(object_size));
4200 j(carry, gc_required);
4201 Operand limit_operand = ExternalOperand(allocation_limit);
4202 cmpp(top_reg, limit_operand);
4203 j(above, gc_required);
4206 UpdateAllocationTopHelper(top_reg, scratch, flags);
4209 if (top_reg.is(result)) {
4213 subp(result, Immediate(object_size));
4215 } else if (tag_result) {
4223 void MacroAssembler::Allocate(int header_size,
4225 Register element_count,
4227 Register result_end,
4232 leap(result_end, Operand(element_count, element_size, header_size));
4233 Allocate(result_end, result, result_end, scratch, gc_required, flags);
4237 void MacroAssembler::Allocate(Register object_size,
4239 Register result_end,
4244 if (!FLAG_inline_new) {
4245 if (emit_debug_code()) {
4247 movl(result, Immediate(0x7091));
4248 movl(result_end, Immediate(0x7191));
4249 if (scratch.is_valid()) {
4250 movl(scratch, Immediate(0x7291));
4257 ASSERT(!result.is(result_end));
4260 LoadAllocationTopHelper(result, scratch, flags);
4264 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4266 Check(zero, kAllocationIsNotDoubleAligned);
4270 ExternalReference allocation_limit =
4271 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4272 if (!object_size.is(result_end)) {
4273 movp(result_end, object_size);
4275 addp(result_end, result);
4276 j(carry, gc_required);
4277 Operand limit_operand = ExternalOperand(allocation_limit);
4278 cmpp(result_end, limit_operand);
4279 j(above, gc_required);
4282 UpdateAllocationTopHelper(result_end, scratch, flags);
4291 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
4292 ExternalReference new_space_allocation_top =
4293 ExternalReference::new_space_allocation_top_address(isolate());
4297 Operand top_operand = ExternalOperand(new_space_allocation_top);
4299 cmpp(object, top_operand);
4300 Check(below, kUndoAllocationOfNonAllocatedMemory);
4302 movp(top_operand, object);
4306 void MacroAssembler::AllocateHeapNumber(Register result,
4308 Label* gc_required) {
4310 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
4318 void MacroAssembler::AllocateTwoByteString(Register result,
4323 Label* gc_required) {
4326 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
4330 leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
4332 andp(scratch1, Immediate(~kObjectAlignmentMask));
4333 if (kHeaderAlignment > 0) {
4334 subp(scratch1, Immediate(kHeaderAlignment));
4338 Allocate(SeqTwoByteString::kHeaderSize,
4350 Integer32ToSmi(scratch1, length);
4351 movp(FieldOperand(result, String::kLengthOffset), scratch1);
4353 Immediate(String::kEmptyHashField));
4357 void MacroAssembler::AllocateAsciiString(Register result,
4362 Label* gc_required) {
4365 const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
4367 movl(scratch1, length);
4369 addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
4370 andp(scratch1, Immediate(~kObjectAlignmentMask));
4371 if (kHeaderAlignment > 0) {
4372 subp(scratch1, Immediate(kHeaderAlignment));
4376 Allocate(SeqOneByteString::kHeaderSize,
4388 Integer32ToSmi(scratch1, length);
4389 movp(FieldOperand(result, String::kLengthOffset), scratch1);
4391 Immediate(String::kEmptyHashField));
4395 void MacroAssembler::AllocateTwoByteConsString(Register result,
4398 Label* gc_required) {
4400 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
4409 void MacroAssembler::AllocateAsciiConsString(Register result,
4412 Label* gc_required) {
4413 Label allocate_new_space, install_map;
4416 ExternalReference high_promotion_mode = ExternalReference::
4417 new_space_high_promotion_mode_active_address(isolate());
4419 Load(scratch1, high_promotion_mode);
4420 testb(scratch1, Immediate(1));
4421 j(zero, &allocate_new_space);
4422 Allocate(ConsString::kSize,
4431 bind(&allocate_new_space);
4432 Allocate(ConsString::kSize,
4447 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
4450 Label* gc_required) {
4452 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
4461 void MacroAssembler::AllocateAsciiSlicedString(Register result,
4464 Label* gc_required) {
4466 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
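// Note (added): the routine below (its signature is not shown in this
// listing) copies 'length' bytes from 'source' to 'destination' in
// pointer-sized chunks, with labeled fast paths (len8/len16/len24) for short
// lengths, an overlapping pointer-sized store for the unaligned tail, and a
// byte-by-byte short_string loop for very short copies.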
4488 if (emit_debug_code()) {
4489 cmpl(length, Immediate(min_length));
4492 Label short_loop, len8, len16, len24, done, short_string;
4495 if (min_length <= kLongStringLimit) {
4496 cmpl(length, Immediate(kPointerSize));
4497 j(below, &short_string, Label::kNear);
4504 if (min_length <= kLongStringLimit) {
4505 cmpl(length, Immediate(2 * kPointerSize));
4507 cmpl(length, Immediate(3 * kPointerSize));
4509 cmpl(length, Immediate(4 * kPointerSize));
4516 movp(scratch, length);
4520 andl(scratch, Immediate(kPointerSize - 1));
4521 movp(length, Operand(source, scratch, times_1, -kPointerSize));
4522 movp(Operand(destination, scratch, times_1, -kPointerSize), length);
4523 addp(destination, scratch);
4525 if (min_length <= kLongStringLimit) {
4526 jmp(&done, Label::kNear);
4528 movp(scratch, Operand(source, 2 * kPointerSize));
4529 movp(Operand(destination, 2 * kPointerSize), scratch);
4531 movp(scratch, Operand(source, kPointerSize));
4532 movp(Operand(destination, kPointerSize), scratch);
4534 movp(scratch, Operand(source, 0));
4535 movp(Operand(destination, 0), scratch);
4537 movp(scratch, Operand(source, length, times_1, -kPointerSize));
4538 movp(Operand(destination, length, times_1, -kPointerSize), scratch);
4539 addp(destination, length);
4540 jmp(&done, Label::kNear);
4542 bind(&short_string);
4543 if (min_length == 0) {
4544 testl(length, length);
4545 j(zero, &done, Label::kNear);
4549 movb(scratch, Operand(source, 0));
4550 movb(Operand(destination, 0), scratch);
4561 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
4562 Register end_offset,
4567 movp(Operand(start_offset, 0), filler);
4568 addp(start_offset, Immediate(kPointerSize));
4570 cmpp(start_offset, end_offset);
4575 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4576 if (context_chain_length > 0) {
4578 movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4579 for (int i = 1; i < context_chain_length; i++) {
4580 movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4593 if (emit_debug_code()) {
4595 Heap::kWithContextMapRootIndex);
4596 Check(not_equal, kVariableResolvedToWithContext);
4601 void MacroAssembler::LoadTransitionedArrayMapConditional(
4604 Register map_in_out,
4606 Label* no_map_match) {
4609 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4610 movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
4613 movp(scratch, Operand(scratch,
4614 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
4616 int offset = expected_kind * kPointerSize +
4617 FixedArrayBase::kHeaderSize;
4622 offset = transitioned_kind * kPointerSize +
4623 FixedArrayBase::kHeaderSize;
4629 static const int kRegisterPassedArguments = 4;
4631 static const int kRegisterPassedArguments = 6;
4634 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4637 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4639 movp(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
4641 movp(function, Operand(function, Context::SlotOffset(index)));
4645 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4648 movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4649 if (emit_debug_code()) {
4651 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
4654 Abort(kGlobalFunctionsMustHaveInitialMap);
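// Note (added): kRegisterPassedArguments above is 4 on Win64 and 6 on
// System V x86-64 (the #ifdef selecting between them is not visible here).
// ArgumentStackSlotsForCFunctionCall() therefore returns at least four slots
// on Windows (the shadow space) and only the spill-over arguments elsewhere.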
4660 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
4667 ASSERT(num_arguments >= 0);
4669 const int kMinimumStackSlots = kRegisterPassedArguments;
4670 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4671 return num_arguments;
4673 if (num_arguments < kRegisterPassedArguments) return 0;
4674 return num_arguments - kRegisterPassedArguments;
4679 void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
4682 uint32_t encoding_mask) {
4684 JumpIfNotSmi(string, &is_object);
4689 movp(value, FieldOperand(string, HeapObject::kMapOffset));
4690 movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));
4693 cmpp(value, Immediate(encoding_mask));
4695 Check(equal, kUnexpectedStringType);
4700 Integer32ToSmi(index, index);
4701 SmiCompare(index, FieldOperand(string, String::kLengthOffset));
4702 Check(less, kIndexIsTooLarge);
4704 SmiCompare(index, Smi::FromInt(0));
4708 SmiToInteger32(index, index);
4712 void MacroAssembler::PrepareCallCFunction(int num_arguments) {
4713 int frame_alignment = OS::ActivationFrameAlignment();
4714 ASSERT(frame_alignment != 0);
4715 ASSERT(num_arguments >= 0);
4720 int argument_slots_on_stack =
4721 ArgumentStackSlotsForCFunctionCall(num_arguments);
4722 subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
4723 andp(rsp, Immediate(-frame_alignment));
4728 void MacroAssembler::CallCFunction(ExternalReference function,
4729 int num_arguments) {
4730 LoadAddress(rax, function);
4731 CallCFunction(rax, num_arguments);
4735 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
4738 if (emit_debug_code()) {
4739 CheckStackAlignment();
4743 ASSERT(OS::ActivationFrameAlignment() != 0);
4744 ASSERT(num_arguments >= 0);
4745 int argument_slots_on_stack =
4746 ArgumentStackSlotsForCFunctionCall(num_arguments);
4747 movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
4751 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
4752 if (r1.is(r2)) return true;
4753 if (r1.is(r3)) return true;
4754 if (r1.is(r4)) return true;
4755 if (r2.is(r3)) return true;
4756 if (r2.is(r4)) return true;
4757 if (r3.is(r4)) return true;
4762 CodePatcher::CodePatcher(byte* address, int size)
4763 : address_(address),
4765 masm_(NULL, address, size + Assembler::kGap) {
4769 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
4773 CodePatcher::~CodePatcher() {
4775 CPU::FlushICache(address_, size_);
4778 ASSERT(masm_.pc_ == address_ + size_);
4779 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
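// Note (added): CheckPageFlag() below masks the object address with
// ~Page::kPageAlignmentMask to reach the MemoryChunk header of its page and
// then tests the requested flag bits at kFlagsOffset, using an 8-bit test
// when the mask fits in a byte and a 32-bit test otherwise.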
4783 void MacroAssembler::CheckPageFlag(
4788 Label* condition_met,
4789 Label::Distance condition_met_distance) {
4791 if (scratch.is(object)) {
4792 andp(scratch, Immediate(~Page::kPageAlignmentMask));
4794 movp(scratch, Immediate(~Page::kPageAlignmentMask));
4795 andp(scratch, object);
4798 testb(Operand(scratch, MemoryChunk::kFlagsOffset),
4799 Immediate(static_cast<uint8_t>(mask)));
4801 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
4803 j(cc, condition_met, condition_met_distance);
4807 void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
4809 Label* if_deprecated) {
4810 if (map->CanBeDeprecated()) {
4812 movp(scratch, FieldOperand(scratch, Map::kBitField3Offset));
4813 SmiToInteger32(scratch, scratch);
4814 andp(scratch, Immediate(Map::Deprecated::kMask));
4820 void MacroAssembler::JumpIfBlack(Register object,
4821 Register bitmap_scratch,
4822 Register mask_scratch,
4824 Label::Distance on_black_distance) {
4826 GetMarkBits(object, bitmap_scratch, mask_scratch);
4828 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
4831 movp(rcx, mask_scratch);
4834 leap(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
4836 andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
4837 cmpp(mask_scratch, rcx);
4838 j(equal, on_black, on_black_distance);
4845 void MacroAssembler::JumpIfDataObject(
4848 Label* not_data_object,
4849 Label::Distance not_data_object_distance) {
4850 Label is_data_object;
4851 movp(scratch, FieldOperand(value, HeapObject::kMapOffset));
4852 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
4853 j(equal, &is_data_object, Label::kNear);
4860 j(not_zero, not_data_object, not_data_object_distance);
4861 bind(&is_data_object);
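// Note (added): GetMarkBits() below computes, for an object address, the
// address of its marking-bitmap cell (bitmap_reg) and a single-bit mask
// (mask_reg) within that cell; the page header is found by masking off the
// low page-alignment bits, and the shift/and steps index into the bitmap by
// the object's offset within its page.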
4865 void MacroAssembler::GetMarkBits(Register addr_reg,
4866 Register bitmap_reg,
4867 Register mask_reg) {
4869 movp(bitmap_reg, addr_reg);
4871 andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
4872 movp(rcx, addr_reg);
4875 shrl(rcx, Immediate(shift));
4877 Immediate((Page::kPageAlignmentMask >> shift) &
4878 ~(Bitmap::kBytesPerCell - 1)));
4880 addp(bitmap_reg, rcx);
4881 movp(rcx, addr_reg);
4883 andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
4884 movl(mask_reg, Immediate(1));
4889 void MacroAssembler::EnsureNotWhite(
4891 Register bitmap_scratch,
4892 Register mask_scratch,
4893 Label* value_is_white_and_not_data,
4894 Label::Distance distance) {
4896 GetMarkBits(value, bitmap_scratch, mask_scratch);
4899 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
4900 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
4901 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
4902 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
4908 testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
4911 if (emit_debug_code()) {
4916 addp(mask_scratch, mask_scratch);
4917 testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
4918 j(zero, &ok, Label::kNear);
4927 Register length = rcx;
4928 Label not_heap_number;
4929 Label is_data_object;
4932 movp(map, FieldOperand(value, HeapObject::kMapOffset));
4933 CompareRoot(map, Heap::kHeapNumberMapRootIndex);
4934 j(not_equal, &not_heap_number, Label::kNear);
4935 movp(length, Immediate(HeapNumber::kSize));
4936 jmp(&is_data_object, Label::kNear);
4938 bind(&not_heap_number);
4944 Register instance_type = rcx;
4945 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
4947 j(not_zero, value_is_white_and_not_data);
4957 j(zero, &not_external, Label::kNear);
4958 movp(length, Immediate(ExternalString::kSize));
4959 jmp(&is_data_object, Label::kNear);
4961 bind(&not_external);
4966 addp(length, Immediate(0x04));
4968 imulp(length, FieldOperand(value, String::kLengthOffset));
4970 addp(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
4971 andp(length, Immediate(~kObjectAlignmentMask));
4973 bind(&is_data_object);
4976 orp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
4978 andp(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
4979 addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);
4985 void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
4987 Register empty_fixed_array_value = r8;
4988 LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
4996 Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
4997 j(equal, call_runtime);
5007 Cmp(rdx, Smi::FromInt(0));
5015 cmpp(empty_fixed_array_value,
5017 j(equal, &no_elements);
5026 cmpp(rcx, null_value);
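// Note (added): TestJSArrayForAllocationMemento() below roughly checks
// whether the word immediately following the receiver lies inside new space
// (between new_space_start and the current allocation top) and, if so,
// compares the candidate map slot at -AllocationMemento::kSize against the
// allocation-memento map; otherwise control reaches no_memento_found.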
5030 void MacroAssembler::TestJSArrayForAllocationMemento(
5031 Register receiver_reg,
5032 Register scratch_reg,
5033 Label* no_memento_found) {
5034 ExternalReference new_space_start =
5035 ExternalReference::new_space_start(isolate());
5036 ExternalReference new_space_allocation_top =
5037 ExternalReference::new_space_allocation_top_address(isolate());
5039 leap(scratch_reg, Operand(receiver_reg,
5043 j(less, no_memento_found);
5044 cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
5046 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
5047 Heap::kAllocationMementoMapRootIndex);
5051 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
5057 ASSERT(!scratch1.is(scratch0));
5058 Register current = scratch0;
5061 movp(current, object);
5065 movp(current, FieldOperand(current, HeapObject::kMapOffset));
5066 movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
5067 andp(scratch1, Immediate(Map::kElementsKindMask));
5068 shr(scratch1, Immediate(Map::kElementsKindShift));
5071 movp(current, FieldOperand(current, Map::kPrototypeOffset));
5072 CompareRoot(current, Heap::kNullValueRootIndex);
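// Note (added): TruncatingDiv() below divides by a compile-time constant
// without a div instruction: rax is loaded with a precomputed "magic"
// multiplier, an imull of the dividend (elided from this listing) leaves the
// high 32 bits of the product in rdx, the sign-dependent add/sub and
// arithmetic shift correct the quotient, and the dividend's sign bit
// (computed by the final shrl) is presumably added into rdx by an addl that
// this listing omits, yielding the truncated quotient in rdx.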
5077 void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
5080 MultiplierAndShift ms(divisor);
5081 movl(rax, Immediate(ms.multiplier()));
5083 if (divisor > 0 && ms.multiplier() < 0) addl(rdx, dividend);
5084 if (divisor < 0 && ms.multiplier() > 0) subl(rdx, dividend);
5085 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift()));
5086 movl(rax, dividend);
5087 shrl(rax, Immediate(31));
5094 #endif // V8_TARGET_ARCH_X64