#if defined(V8_TARGET_ARCH_X64)

    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
static const int kInvalidRootRegisterDelta = -1;

intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
Operand MacroAssembler::ExternalOperand(ExternalReference target,
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
  movq(scratch, target);
  return Operand(scratch, 0);
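// A rough note on the pattern above and in Load/Store/LoadAddress below:
// kRootRegister (r13 on this port) points into the isolate's root array, so
// when an ExternalReference lies inside the isolate, RootRegisterDelta()
// yields a displacement and the access can be encoded as a single
//   movq(dst, Operand(kRootRegister, static_cast<int32_t>(delta)));
// instead of first materializing a 64-bit address in a scratch register.
// The kInvalidRootRegisterDelta / is_int32 checks guard the cases where the
// displacement cannot be encoded that way.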
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
  if (destination.is(rax)) {
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
  if (source.is(rax)) {
    store_rax(destination);
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
  movq(destination, source);
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      if (!is_int8(static_cast<int32_t>(delta))) {
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !Serializer::enabled()) {
    if (emit_debug_code()) {
    push(Immediate(static_cast<int32_t>(address)));
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
  ASSERT(root_array_available_);

void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);

void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
void MacroAssembler::RememberedSetHelper(Register object,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  movq(Operand(scratch, 0), addr);
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    bind(&buffer_overflowed);
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ASSERT(and_then == kFallThroughAtEnd);
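// Roughly what this helper does: kStoreBufferTopRootIndex caches the store
// buffer's top pointer. The sequence above appends addr at the top, writes the
// bumped top back, and then tests the overflow bit that becomes set in the
// top pointer's value once the buffer fills up. Only in that case does control
// reach StoreBufferOverflowStub so the runtime can process the buffer;
// otherwise the code returns or falls through as `and_then` requests.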
void MacroAssembler::InNewSpace(Register object,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    if (scratch.is(object)) {
    movq(scratch, ExternalReference::new_space_mask(isolate()));
    and_(scratch, object);
    j(cc, branch, distance);
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    if (scratch.is(object)) {
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, distance);
void MacroAssembler::RecordWriteField(
  JumpIfSmi(value, &done);
  if (emit_debug_code()) {
    j(zero, &ok, Label::kNear);
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
  if (emit_debug_code()) {

void MacroAssembler::RecordWriteArray(Register object,
  JumpIfSmi(value, &done);
  Register dst = index;
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
  if (emit_debug_code()) {

void MacroAssembler::RecordWrite(Register object,
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);
      !FLAG_incremental_marking) {
  if (emit_debug_code()) {
    cmpq(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
  JumpIfSmi(value, &done);
                MemoryChunk::kPointersToHereAreInterestingMask,
  CheckPageFlag(object,
                MemoryChunk::kPointersFromHereAreInterestingMask,
  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  if (emit_debug_code()) {
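// RecordWrite and its Field/Array wrappers above form the write barrier. As
// far as the surviving lines show, the fast path skips the barrier when the
// stored value is a smi, when the destination page is not marked as
// "pointers to here are interesting", or when the source page is not marked
// as "pointers from here are interesting"; only the remaining writes
// (old-to-new pointers, or writes made during incremental marking) reach
// RecordWriteStub.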
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);

void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");

void MacroAssembler::Check(Condition cc, const char* msg) {
  j(cc, &L, Label::kNear);
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    bind(&alignment_as_expected);

void MacroAssembler::NegativeZeroTest(Register result,
  testl(result, result);
void MacroAssembler::Abort(const char* msg) {
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
  RecordComment("Abort message: ");
      reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
    CallRuntime(Runtime::kAbort, 2);
    CallRuntime(Runtime::kAbort, 2);
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);

void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);

void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());

bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();

void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
void MacroAssembler::IndexFromHash(Register hash, Register index) {
         (1 << String::kArrayIndexValueBits));
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  Integer32ToSmi(index, hash);
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);

void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));

void MacroAssembler::CallRuntime(const Runtime::Function* f,
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
  TailCallExternalReference(ExternalReference(fid, isolate()),

static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
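// All of the runtime-call helpers above share one register protocol before
// CEntryStub takes over: rax holds the argument count and rbx the address of
// the C++ runtime function, roughly
//   Set(rax, num_arguments);
//   LoadAddress(rbx, ExternalReference(f, isolate()));
//   CEntryStub ces(f->result_size);
// The TailCall variants load the same registers but end in a jump rather than
// a call.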
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#if defined(_WIN64) && !defined(__MINGW64__)
  EnterApiExitFrame(arg_stack_space + 1);
  EnterApiExitFrame(arg_stack_space);
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(rax, reinterpret_cast<int64_t>(function_address),
#if defined(_WIN64) && !defined(__MINGW64__)
  movq(rax, Operand(rax, 0));
  j(zero, &empty_result);
  movq(rax, Operand(rax, 0));
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  bind(&leave_exit_frame);
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);
#if ENABLE_EXTRA_CHECKS
  Register return_value = rax;
  JumpIfSmi(return_value, &ok, Label::kNear);
  movq(map, FieldOperand(return_value, HeapObject::kMapOffset));
  j(below, &ok, Label::kNear);
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);
  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);
  Abort("API call returned invalid object");
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
  LoadAddress(rcx, ExternalReference::isolate_address());
  LoadAddress(rdi, ExternalReference::isolate_address());
      ExternalReference::delete_handle_scope_extensions(isolate()));
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
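// Sketch of the bookkeeping above, reconstructed from the surviving lines:
// before the API call the current handle-scope state (next, limit) is saved in
// r14/rbx and the level counter is incremented; afterwards the level is
// decremented, `next` is restored, and the saved limit is compared with the
// current one. A changed limit means the callee extended the scope, so control
// detours through delete_allocated_handles (delete_handle_scope_extensions)
// before leaving the exit frame, and a pending scheduled exception (slot not
// holding the hole value) is promoted via Runtime::kPromoteScheduledException.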
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   const CallWrapper& call_wrapper) {
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
      JSBuiltinsObject::OffsetOfFunctionWithId(id)));

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  GetBuiltinFunction(rdi, id);
#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

                                     Register exclusion3) {
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
    CpuFeatures::Scope scope(SSE2);
      XMMRegister reg = XMMRegister::from_code(i);

                                    Register exclusion3) {
    CpuFeatures::Scope scope(SSE2);
      XMMRegister reg = XMMRegister::from_code(i);
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
void MacroAssembler::Set(Register dst, int64_t x) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
    movq(dst, Immediate(static_cast<int32_t>(x)));

void MacroAssembler::Set(const Operand& dst, int64_t x) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
bool MacroAssembler::IsUnsafeInt(const int x) {
  static const int kMaxBits = 17;

void MacroAssembler::SafeMove(Register dst, Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));

void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Push(Smi::FromInt(src->value() ^ jit_cookie()));
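// SafeMove/SafePush appear to be the usual JIT-spraying mitigation: a smi
// constant wide enough to be useful to an attacker (IsUnsafeInt) is never
// emitted verbatim. It is XOR-masked with the per-process jit_cookie() here,
// and the cookie (also encoded as a smi) is XORed back out of the destination
// immediately afterwards, so the attacker-chosen bit pattern never appears in
// the instruction stream.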
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
      j(equal, &ok, Label::kNear);
  int value = source->value();
  unsigned int uvalue = negative ? -value : value;
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  shl(dst, Immediate(kSmiShift));
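// On this port a smi keeps its 32-bit payload in the upper half of the word
// (kSmiShift is 32), so tagging is a single shift left by 32 and untagging a
// shift right, as in Integer32ToSmi above and SmiToInteger32/64 below. The
// same layout lets helpers operate on just the high 32 bits of a smi field in
// memory via an operand displaced by kSmiShift / kBitsPerByte.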
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");

void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
    addl(dst, Immediate(constant));
    leal(dst, Operand(src, constant));
  shl(dst, Immediate(kSmiShift));

void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  shr(dst, Immediate(kSmiShift));

void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {

void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  sar(dst, Immediate(kSmiShift));

void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {

void MacroAssembler::SmiTest(Register src) {
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {

void MacroAssembler::SmiCompare(Register dst, Smi* src) {

void MacroAssembler::Cmp(Register dst, Smi* src) {
  if (src->value() == 0) {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);

void MacroAssembler::SmiCompare(Register dst, const Operand& src) {

void MacroAssembler::SmiCompare(const Operand& dst, Register src) {

void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
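// Note on the cmpl above: the operand is displaced by kSmiShift / kBitsPerByte
// (4 bytes), so only the upper half of the smi slot (the untagged value) is
// compared against a plain 32-bit immediate, avoiding a 64-bit smi constant.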
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));

void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {

void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
  SmiToInteger64(dst, src);
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));

void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
  ASSERT((0 <= power) && (power < 32));
  shr(dst, Immediate(power + kSmiShift));

void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
  JumpIfNotSmi(dst, on_not_smis, near_jump);
Condition MacroAssembler::CheckSmi(Register src) {

Condition MacroAssembler::CheckSmi(const Operand& src) {

Condition MacroAssembler::CheckNonNegativeSmi(Register src) {

Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);

Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);

Condition MacroAssembler::CheckEitherSmi(Register first,
  if (first.is(second)) {
    return CheckSmi(first);
  if (scratch.is(second)) {
    andl(scratch, first);
  if (!scratch.is(first)) {
    movl(scratch, first);
  andl(scratch, second);

Condition MacroAssembler::CheckIsMinSmi(Register src) {

Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {

Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {

void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {

void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);

void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);

void MacroAssembler::JumpIfSmi(Register src,
                               Label::Distance near_jump) {
  j(smi, on_smi, near_jump);

void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label::Distance near_jump) {

void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);

void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);

void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);

void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  LoadSmiConstant(tmp, constant);
  j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    switch (constant->value()) {
    Register constant_reg = GetSmiConstant(constant);
    addq(dst, constant_reg);
    switch (constant->value()) {
      LoadSmiConstant(dst, constant);

void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));

void MacroAssembler::SmiAddConstant(Register dst,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    j(overflow, on_not_smi_result, near_jump);
    LoadSmiConstant(dst, constant);
    j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));

void MacroAssembler::SmiSubConstant(Register dst,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    if (constant->value() == Smi::kMinValue) {
      j(not_sign, on_not_smi_result, near_jump);
      j(overflow, on_not_smi_result, near_jump);
    if (constant->value() == Smi::kMinValue) {
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      j(overflow, on_not_smi_result, near_jump);
void MacroAssembler::SmiNeg(Register dst,
                            Label* on_smi_result,
                            Label::Distance near_jump) {

void MacroAssembler::SmiAdd(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(overflow, on_not_smi_result, near_jump);
  j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiAdd(Register dst,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(overflow, on_not_smi_result, near_jump);
  ASSERT(!src2.AddressUsesRegister(dst));
  j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiAdd(Register dst,
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
  lea(dst, Operand(src1, src2, times_1, 0));

void MacroAssembler::SmiSub(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(overflow, on_not_smi_result, near_jump);
  j(overflow, on_not_smi_result, near_jump);
void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiSub(Register dst,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(overflow, on_not_smi_result, near_jump);
  j(overflow, on_not_smi_result, near_jump);

void MacroAssembler::SmiSub(Register dst,
                            const Operand& src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiMul(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
    Label failure, zero_correct_result;
    SmiToInteger64(dst, src1);
    j(overflow, &failure, Label::kNear);
    Label correct_result;
    j(not_zero, &correct_result, Label::kNear);
    j(positive, &zero_correct_result, Label::kNear);
    jmp(on_not_smi_result, near_jump);
    bind(&zero_correct_result);
    bind(&correct_result);
    SmiToInteger64(dst, src1);
    j(overflow, on_not_smi_result, near_jump);
    Label correct_result;
    j(not_zero, &correct_result, Label::kNear);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
void MacroAssembler::SmiDiv(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(zero, on_not_smi_result, near_jump);
  SmiToInteger32(rax, src1);
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  j(positive, &safe_div, Label::kNear);
  jmp(on_not_smi_result, near_jump);
  j(negative, on_not_smi_result, near_jump);
  SmiToInteger32(src2, src2);
  Integer32ToSmi(src2, src2);
  j(zero, &smi_result, Label::kNear);
  jmp(on_not_smi_result, near_jump);
  j(not_zero, on_not_smi_result, near_jump);
  if (!dst.is(src1) && src1.is(rax)) {
  Integer32ToSmi(dst, rax);

void MacroAssembler::SmiMod(Register dst,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  j(zero, on_not_smi_result, near_jump);
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);
  cmpl(rax, Immediate(Smi::kMinValue));
  cmpl(src2, Immediate(-1));
  Integer32ToSmi(src2, src2);
  jmp(on_not_smi_result, near_jump);
  Integer32ToSmi(src2, src2);
  j(not_zero, &smi_result, Label::kNear);
  j(negative, on_not_smi_result, near_jump);
  Integer32ToSmi(dst, rdx);
void MacroAssembler::SmiNot(Register dst, Register src) {

void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
  } else if (dst.is(src)) {
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
    LoadSmiConstant(dst, constant);

void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {

void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
    LoadSmiConstant(dst, constant);
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
  if (shift_value > 0) {
    sar(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));

void MacroAssembler::SmiShiftLeftConstant(Register dst,
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));

void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  if (shift_value == 0) {
    j(negative, on_not_smi_result, near_jump);
  shr(dst, Immediate(shift_value + kSmiShift));
  shl(dst, Immediate(kSmiShift));

void MacroAssembler::SmiShiftLeft(Register dst,
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  and_(rcx, Immediate(0x1f));

void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (src1.is(rcx) || src2.is(rcx)) {
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
    j(negative, on_not_smi_result, near_jump);

void MacroAssembler::SmiShiftArithmeticRight(Register dst,
  } else if (src2.is(rcx)) {
  if (!dst.is(src1)) {
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shl(dst, Immediate(kSmiShift));
  } else if (src2.is(rcx)) {
void MacroAssembler::SelectNonSmi(Register dst,
                                  Label::Distance near_jump) {
  if (allow_stub_calls()) {
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  j(not_zero, on_not_smis, near_jump);

SmiIndex MacroAssembler::SmiToIndex(Register dst,
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
    shl(dst, Immediate(shift - kSmiShift));
  return SmiIndex(dst, times_1);
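// SmiToIndex (and SmiToNegativeIndex below) appear to fold the requested
// scale into the register itself: since the payload already sits 32 bits up,
// shifting right by (kSmiShift - shift), or left when shift exceeds kSmiShift,
// leaves dst holding value * 2^shift, and the returned SmiIndex therefore uses
// times_1 as its operand scale factor.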
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
    shl(dst, Immediate(shift - kSmiShift));
  return SmiIndex(dst, times_1);
void MacroAssembler::AddSmiField(Register dst, const Operand& src) {

void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label::Distance near_jump) {
  j(is_smi, not_string, near_jump);

void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Label::Distance near_jump) {
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
  const int kFlatAsciiStringMask =
  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
      Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
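// The lea/compare pair above packs both masked instance types into one
// register: the times_8 scale shifts scratch2 up by three bits next to
// scratch1, so a single compare against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) checks that both strings
// are flat ASCII at once. The ASSERT_EQ guarantees the two shifted mask copies
// cannot overlap.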
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  const int kFlatAsciiStringMask =
  andl(scratch, Immediate(kFlatAsciiStringMask));

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Label::Distance near_jump) {
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);
  const int kFlatAsciiStringMask =
  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
      Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
void MacroAssembler::Move(Register dst, Register src) {

void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);

void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));

void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));

void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
    ASSERT(source->IsHeapObject());

void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
    ASSERT(source->IsHeapObject());

void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(result, Operand(result, 0));
    Move(result, object);

void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);

void MacroAssembler::LoadGlobalCell(Register dst,
                                    Handle<JSGlobalPropertyCell> cell) {
    load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, Operand(dst, 0));

void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
    push(Immediate(static_cast<int32_t>(smi)));
    Register constant = GetSmiConstant(source);
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {

void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));

void MacroAssembler::TestBit(const Operand& src, int bits) {
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
void MacroAssembler::Jump(ExternalReference ext) {

void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {

void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  jmp(code_object, rmode);

int MacroAssembler::CallSize(ExternalReference ext) {
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;

void MacroAssembler::Call(ExternalReference ext) {
  int end_position = pc_offset() + CallSize(ext);
  CHECK_EQ(end_position, pc_offset());

void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  int end_position = pc_offset() + CallSize(destination, rmode);
  CHECK_EQ(pc_offset(), end_position);

void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
  int end_position = pc_offset() + CallSize(code_object);
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
  CHECK_EQ(end_position, pc_offset());
void MacroAssembler::Pushad() {
  lea(rsp, Operand(rsp, -sp_delta));

void MacroAssembler::Popad() {
  lea(rsp, Operand(rsp, sp_delta));

void MacroAssembler::Dropad() {

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movq(SafepointRegisterSlot(dst), imm);

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);

void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));

Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  if (kind == StackHandler::JS_ENTRY) {
    Push(Smi::FromInt(0));
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(ExternalOperand(handler_address));
  movq(ExternalOperand(handler_address), rsp);
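// The stack-handler chain is an intrusive linked list threaded through the
// stack: PushTryHandler pushes a handler record (state word plus the previous
// head loaded from Isolate::kHandlerAddress) and then stores rsp back into
// that slot, making the new record the head. PopTryHandler and the Throw
// helpers below unlink records again.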
void MacroAssembler::PopTryHandler() {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(ExternalOperand(handler_address));

void MacroAssembler::JumpToHandlerEntry() {
  shr(rdx, Immediate(StackHandler::kKindWidth));
  SmiToInteger64(rdx, rdx);

void MacroAssembler::Throw(Register value) {
  if (!value.is(rax)) {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movq(rsp, ExternalOperand(handler_address));
  pop(ExternalOperand(handler_address));
  j(zero, &skip, Label::kNear);
  movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  JumpToHandlerEntry();

void MacroAssembler::ThrowUncatchable(Register value) {
  if (!value.is(rax)) {
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  pop(ExternalOperand(handler_address));
  JumpToHandlerEntry();
void MacroAssembler::Ret() {

void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
    addq(rsp, Immediate(bytes_dropped));

void MacroAssembler::FCmp() {

void MacroAssembler::CmpObjectType(Register heap_object,
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);

void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
       Immediate(static_cast<int8_t>(type)));
void MacroAssembler::CheckFastElements(Register map,
                                       Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);

void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
        Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);

void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label::Distance distance) {
        Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    XMMRegister xmm_scratch,
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
      FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  jmp(&have_double_value, Label::kNear);
void MacroAssembler::CompareMap(Register obj,
                                Label* early_success,
  Map* current_map = *map;
      current_map = current_map->LookupElementsTransitionMap(kind);
      if (!current_map) break;
      j(equal, early_success, Label::kNear);
          Handle<Map>(current_map));

void MacroAssembler::CheckMap(Register obj,
    JumpIfSmi(obj, fail);
  CompareMap(obj, map, &success, mode);
void MacroAssembler::ClampUint8(Register reg) {
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);

void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  xorps(temp_xmm_reg, temp_xmm_reg);
  cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmpl(result_reg, Immediate(0x80000000));
  j(equal, &conv_failure, Label::kNear);
  movl(result_reg, Immediate(0));
  setcc(above, result_reg);
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
static double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;

void MacroAssembler::LoadUint32(XMMRegister dst,
                                XMMRegister scratch) {
  cmpl(src, Immediate(0));
       reinterpret_cast<int64_t>(&kUint32Bias),
  cvtlsi2sd(dst, src);
  addsd(dst, scratch);
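// cvtlsi2sd converts a signed 32-bit value, so a uint32 with its top bit set
// would come out negative; kUint32Bias (2^32) is added back in that case.
// This is the standard unsigned-to-double trick when only a signed SSE2
// convert is available.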
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset));

void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);

void MacroAssembler::EnumLength(Register dst, Register map) {

void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Code> success,
    JumpIfSmi(obj, &fail);
  j(equal, success, RelocInfo::CODE_TARGET);
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    j(is_smi, &ok, Label::kNear);
        isolate()->factory()->heap_number_map());
    Check(equal, "Operand is not a number");

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Check(is_smi, "Operand is not a smi");

void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Check(is_smi, "Operand is not a smi");

void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    Check(above_equal, "32 bit value in register is not zero-extended");

void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    Check(not_equal, "Operand is a smi and not a string");
    movq(object, FieldOperand(object, HeapObject::kMapOffset));
    Check(below, "Operand is not a string");

void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
  if (emit_debug_code()) {
    Check(equal, message);

Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             bool miss_on_bound_function) {
  if (miss_on_bound_function) {
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
             SharedFunctionInfo::kCompilerHintsOffset),
           SharedFunctionInfo::kBoundFunction);
       Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, Label::kNear);
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done, Label::kNear);
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
      incl(counter_operand);
      addl(counter_operand, Immediate(value));

void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
      decl(counter_operand);
      subl(counter_operand, Immediate(value));

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  ASSERT(AllowThisStubCall(&ces));
#endif  // ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
    LoadSmiConstant(dst, Smi::FromInt(1));
    LoadSmiConstant(dst, Smi::FromInt(0));

void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                const CallWrapper& call_wrapper,
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 Handle<Code>::null(),
                 &definitely_mismatches,
  if (!definitely_mismatches) {
      call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
      call_wrapper.AfterCall();
    SetCallKind(rcx, call_kind);
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                const CallWrapper& call_wrapper,
  bool definitely_mismatches = false;
  Register dummy = rax;
  InvokePrologue(expected,
                 &definitely_mismatches,
  if (!definitely_mismatches) {
      call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(rcx, call_kind);
      call_wrapper.AfterCall();
    SetCallKind(rcx, call_kind);

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper,
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper,
  LoadHeapObject(rdi, function);
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    bool* definitely_mismatches,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper,
  bool definitely_matches = false;
  *definitely_mismatches = false;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        definitely_matches = true;
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
    if (actual.is_immediate()) {
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
      call_wrapper.BeforeCall(CallSize(adaptor));
      SetCallKind(rcx, call_kind);
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      SetCallKind(rcx, call_kind);
      Jump(adaptor, RelocInfo::CODE_TARGET);
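// InvokePrologue compares the callee's declared parameter count (rbx or an
// immediate) with the actual argument count in rax. When the two can be shown
// equal at compile time the call proceeds directly; otherwise the invocation
// is routed through the ArgumentsAdaptorTrampoline builtin, which pads or
// drops stack arguments so the callee always sees its expected arity.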
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  Push(Smi::FromInt(type));
  if (emit_debug_code()) {
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    Check(not_equal, "code object not properly patched");

void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Check(equal, "stack frame types must match");
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);

void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
    const int kShadowSpace = 4;
    arg_stack_space += kShadowSpace;
    int space = XMMRegister::kNumRegisters * kDoubleSize +
    subq(rsp, Immediate(space));
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    and_(rsp, Immediate(-kFrameAlignment));
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);

void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  EnterExitFrameEpilogue(arg_stack_space, save_doubles);

void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);

void MacroAssembler::LeaveExitFrame(bool save_doubles) {
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));
  lea(rsp, Operand(r15, 1 * kPointerSize));
  LeaveExitFrameEpilogue();

void MacroAssembler::LeaveApiExitFrame() {
  LeaveExitFrameEpilogue();

void MacroAssembler::LeaveExitFrameEpilogue() {
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  movq(rsi, context_operand);
  movq(context_operand, Immediate(0));
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
  Label same_contexts;
  ASSERT(!holder_reg.is(scratch));
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
  if (emit_debug_code()) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  if (emit_debug_code()) {
        isolate()->factory()->native_context_map());
    Check(equal, "JSGlobalObject::native_context should be a native context.");
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);
  if (emit_debug_code()) {
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, "JSGlobalObject::native_context should be a native context.");
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  bind(&same_contexts);
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);
  shll(scratch, Immediate(15));
  shrl(scratch, Immediate(12));
  leal(r0, Operand(r0, r0, times_4, 0));
  shrl(scratch, Immediate(4));
  imull(r0, r0, Immediate(2057));
  shrl(scratch, Immediate(16));
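// The shift/lea/imul sequence above is an integer hash mix: the key appears to
// be combined with the heap's hash seed and then scrambled with shifts, adds
// and a multiply by 2057. LoadFromNumberDictionary below masks the result with
// the dictionary capacity to obtain the starting probe index.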
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
  GetNumberHash(r0, r1);
       SeededNumberDictionary::kCapacityOffset));
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));
         SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
void MacroAssembler::LoadAllocationTopHelper(Register result,
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
    ASSERT(!scratch.is_valid());
    Operand top_operand = ExternalOperand(new_space_allocation_top);
    cmpq(result, top_operand);
    Check(equal, "Unexpected allocation top");
  if (scratch.is_valid()) {
    LoadAddress(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
    Load(result, new_space_allocation_top);

void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
  if (emit_debug_code()) {
    Check(zero, "Unaligned allocation in new space");
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  if (scratch.is_valid()) {
    movq(Operand(scratch, 0), result_end);
    Store(new_space_allocation_top, result_end);
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result_end,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
  ASSERT(!result.is(result_end));
  LoadAllocationTopHelper(result, scratch, flags);
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    movq(top_reg, result);
  addq(top_reg, Immediate(object_size));
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(top_reg, limit_operand);
  j(above, gc_required);
  UpdateAllocationTopHelper(top_reg, scratch);
  if (top_reg.is(result)) {
    subq(result, Immediate(object_size));
  } else if ((flags & TAG_OBJECT) != 0) {
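// All three AllocateInNewSpace overloads are inline bump-pointer allocation:
// load the current new-space top, add the requested size, bail out to
// gc_required on carry or when the new top exceeds the allocation limit, then
// publish the new top via UpdateAllocationTopHelper. With TAG_OBJECT the
// result is returned already carrying the heap-object tag; the 0x7091-style
// debug fills only run when inline allocation is disabled.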
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        Register element_count,
                                        Register result_end,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
  ASSERT(!result.is(result_end));
  LoadAllocationTopHelper(result, scratch, flags);
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);
  UpdateAllocationTopHelper(result_end, scratch);
  if ((flags & TAG_OBJECT) != 0) {

void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result_end,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
  ASSERT(!result.is(result_end));
  LoadAllocationTopHelper(result, scratch, flags);
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);
  UpdateAllocationTopHelper(result_end, scratch);
  if ((flags & TAG_OBJECT) != 0) {

void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  Operand top_operand = ExternalOperand(new_space_allocation_top);
  cmpq(object, top_operand);
  Check(below, "Undo allocation of non allocated memory");
  movq(top_operand, object);
3896 void MacroAssembler::AllocateHeapNumber(Register result,
3898 Label* gc_required) {
3900 AllocateInNewSpace(HeapNumber::kSize,
void MacroAssembler::AllocateTwoByteString(Register result, Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  // scratch1 = length * 2 + kObjectAlignmentMask (each character is 2 bytes).
  lea(scratch1, Operand(length, length, times_1,
                        kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate the two-byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize, times_1, scratch1, result,
                     scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}

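// The size computation above rounds the character payload up so that the
// total object size (header + characters) stays a multiple of the object
// alignment.  For example, with 8-byte alignment a two-byte string of length
// 3 needs 6 payload bytes, which are rounded up before the header size is
// added by AllocateInNewSpace.
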
void MacroAssembler::AllocateAsciiString(Register result, Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate the ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize, times_1, scratch1, result,
                     scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the cons string object and set its map; the other fields are
  // left uninitialized for the caller to fill in.
  AllocateInNewSpace(ConsString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);
  LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);
  LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}

void MacroAssembler::CopyBytes(Register destination, Register source,
                               Register length, int min_length,
                               Register scratch) {
  ASSERT(min_length >= 0);
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, "Invalid min_length");
  }
  Label loop, done, short_string, short_loop;

  const int kLongStringLimit = 20;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kLongStringLimit));
    j(less_equal, &short_string);
  }

  // rep movs requires source in rsi, destination in rdi and count in rcx.
  ASSERT(source.is(rsi));
  ASSERT(destination.is(rdi));
  ASSERT(length.is(rcx));

  // Copy length / 8 quadwords with rep movsq, then cover the remaining
  // 1-7 bytes with one unaligned quadword move ending at the last byte.
  movq(scratch, length);
  shrl(length, Immediate(3));
  repmovsq();
  andl(scratch, Immediate(0x7));
  movq(length, Operand(source, scratch, times_1, -8));
  movq(Operand(destination, scratch, times_1, -8), length);
  addq(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done);
    }
    lea(scratch, Operand(destination, length, times_1, 0));

    bind(&short_loop);
    movb(length, Operand(source, 0));
    movb(Operand(destination, 0), length);
    incq(source);
    incq(destination);
    cmpq(destination, scratch);
    j(not_equal, &short_loop);

    bind(&done);
  }
}

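// CopyBytes above uses two strategies: lengths above kLongStringLimit are
// copied eight bytes at a time with rep movsq, and the 1-7 trailing bytes
// are handled by one overlapping quadword store that ends exactly at the
// last byte; shorter lengths fall through to a simple byte-at-a-time loop.
// The overlapping store is safe because the long path always copies at
// least eight bytes.
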
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  movq(Operand(start_offset, 0), filler);
  addq(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmpq(start_offset, end_offset);
  j(less, &loop);
}

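// The filler loop above tests at the bottom: it jumps straight to the
// comparison first, so an empty range (start_offset == end_offset) writes
// nothing.  Callers typically pass a filler value such as undefined when
// pre-initializing the fields of a freshly allocated object.
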
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // The slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e. the static scope chain and the runtime context chain do not
  // agree).  A variable occurring in such a scope should have slot type
  // LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, "Variable resolved to with context.");
  }
}

void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the native context from the global object of the current context.
  movq(scratch,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the given map matches the cached map for the expected
  // elements kind.
  movq(scratch, Operand(scratch,
                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  int offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmpq(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  movq(map_in_out, FieldOperand(scratch, offset));
}

void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  movq(map_out, FieldOperand(function_in,
                             JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, kind, map_out,
                                        scratch, &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS, map_out,
                                        scratch, &done);
  }
  bind(&done);
}

#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}

void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}

int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments,
  // including the ones passed in registers, and space is always allocated
  // for the four register arguments even if the function takes fewer.
  // On the AMD64 ABI (Linux/Mac) the first six arguments are passed in
  // registers and the caller does not reserve stack slots for them.
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}

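// Example: a call with 6 arguments needs 6 stack slots on Windows x64
// (shadow space for the 4 register arguments plus 2 stack arguments) but 0
// slots on the System V AMD64 ABI, where all 6 fit in registers; with 8
// arguments the counts are 8 and 2 respectively.
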
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make the stack end at the required alignment and allocate space for
  // arguments and the old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
}

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}

void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}

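// PrepareCallCFunction stores the old rsp in the slot just above the
// reserved argument area, and CallCFunction restores rsp from that same
// slot after the call, so the two must always be used as a pair with the
// same num_arguments value.
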
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}

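// AreAliased checks all six pairs among the four registers; it is used in
// ASSERTs in this file to guarantee that the scratch registers handed to a
// helper are pairwise distinct.
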
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch.  The size is adjusted with kGap so the assembler can generate
  // size bytes of instructions without running against the buffer limit.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  // Compute the page start by masking off the low bits of the object address.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movq(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Test the flag bits in the MemoryChunk header of that page.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

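// Pages are aligned to Page::kPageAlignmentMask + 1 bytes, so clearing the
// low bits of any interior pointer yields the MemoryChunk header for that
// page; the flag word there records per-page properties used, for example,
// by the write barrier.
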
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movq(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits:
  // rcx = mask | (mask << 1).
  lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  // The object is black exactly when only the first of the two bits is set.
  cmpq(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}

void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movq(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons or sliced string then it's an
  // object containing no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  // Compute the page start.
  movq(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  // Add the byte offset of the bitmap cell that covers addr_reg.
  movq(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  and_(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));
  addq(bitmap_reg, rcx);

  // mask_reg = 1 << (bit index of addr_reg within its bitmap cell).
  movq(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}

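// Mark bits are kept one bit per pointer-size word: the bit index of an
// address within its page is (addr & Page::kPageAlignmentMask) >>
// kPointerSizeLog2.  The code above splits that index into the byte offset
// of the containing bitmap cell (the first shrl/and_ on rcx) and the bit
// position within the 32-bit cell (the low Bitmap::kBitsPerCellLog2 bits of
// addr >> kPointerSizeLog2), which becomes the single set bit in mask_reg.
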
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for the impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow, making the check conservative.
    addq(mask_scratch, mask_scratch);
    testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movq(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movq(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object
  // containing no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-sliced) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's sequential and the length needs computing.
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movq(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  addq(length, Immediate(0x04));
  // length is now either 4 (if ASCII) or 8 (if UC16), i.e. the char-size
  // shifted by 2.
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addq(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}

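// EnsureNotWhite supports incremental marking: objects that contain no
// pointers (heap numbers, sequential and external strings) can be marked
// black immediately and their size added to the page's live byte count,
// while any other white object is reported back through
// value_is_white_and_not_data for the caller to handle.
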
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  movq(rcx, rax);

  // Check if the enum length field of the receiver's map is properly
  // initialized, indicating that there is an enum cache.
  movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(Map::kInvalidEnumCache));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the enum cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements.  Register rcx contains the current JS
  // object we've reached through the prototype chain.
  cmpq(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  // Walk up the prototype chain until we hit null.
  movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpq(rcx, null_value);
  j(not_equal, &next);
}

#endif  // V8_TARGET_ARCH_X64