#if defined(V8_TARGET_ARCH_X64)

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
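
// Computes the delta between an external reference and the value kept in
// kRootRegister, which points into the isolate's root array. If the delta
// fits in 32 bits, the reference can be addressed relative to the root
// register instead of materializing a 64-bit immediate.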
static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}


Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    intptr_t delta = RootRegisterDelta(source, isolate());
    if (is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Size of lea(destination, Operand(kRootRegister, delta)).
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, address).
  return 10;
}
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !Serializer::enabled()) {
    if (emit_debug_code()) {
      movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    }
    push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  push(kScratchRegister);
}
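
// The root-list accessors below address the heap's root array directly
// through kRootRegister (offset by kRootRegisterBias), so they need no
// relocation information and no 64-bit immediates.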
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               fixed_offset * kPointerSize - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}
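
// Records the address being written to in the store buffer: the address in
// 'addr' is appended at the current store buffer top, and if the buffer
// overflows the StoreBufferOverflowStub is called before either returning
// or falling through, as selected by 'and_then'.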
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (FLAG_debug_code) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movq(Operand(scratch, 0), addr);
  // Increment buffer top.
  addq(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
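
// Tests whether 'object' lies in the young generation. While the serializer
// is active the new-space boundaries are not yet fixed, so the test must go
// through ExternalReference masks instead of baking in heap constants.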
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, distance);
  }
}
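
// Write-barrier support. The RecordWrite family skips the barrier for smi
// stores, checks the relevant page flags, and calls RecordWriteStub when a
// pointer must be recorded. In debug builds the clobbered input registers
// are zapped with kZapValue afterwards to provoke errors on accidental
// reuse.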
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  lea(dst, Operand(object, index, times_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !address.is(rsi));

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (FLAG_debug_code) {
    Label ok;
    cmpq(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip the barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems, however
  // msg is not guaranteed to be aligned properly. Instead, we pass an
  // aligned pointer that is a proper v8 smi, plus the alignment delta as a
  // smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);

  if (!has_frame_) {
    // Claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  STATIC_ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
                (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in index.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  Integer32ToSmi(index, hash);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // Most runtime routines take the number of arguments in rax and the
  // address of the C function to call in rbx.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);
  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that it fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#if defined(_WIN64) && !defined(__MINGW64__)
  // We need to prepare a slot for the result handle on the stack and put a
  // pointer to it into the first argument register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}
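
// Calls an API function. Allocates HandleScope, extracts the returned value
// from the handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).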
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax, reinterpret_cast<int64_t>(function_address),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#if defined(_WIN64) && !defined(__MINGW64__)
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_result);
  // The returned handle was zero; the result is undefined.
  Move(rax, factory->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#if defined(_WIN64) && !defined(__MINGW64__)
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movq(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}
#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi),
  REG(r8), REG(r9), REG(r10), REG(r11)
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need
  // to store the registers in any particular way, but we do have to store
  // and restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}


void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  // Small constants are materialized cheaply by scaling kSmiConstantRegister
  // (which holds Smi::FromInt(1)); other values fall back to an immediate.
  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
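
// Smi tagging on x64 keeps the 32-bit payload in the upper half of the word
// (kSmiShift == 32): tagging is a left shift, untagging an arithmetic or
// logical right shift, and a zero low word identifies a smi.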
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The only memory operand comparison against a smi constant goes through
  // a scratch register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}
Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit is
  // not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  // Does not assume that src is a smi.
  ASSERT(!dst.is(kScratchRegister));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT(!src.is(kScratchRegister));
  ASSERT(!dst.is(kSmiConstantRegister));
  ASSERT(!src.is(kSmiConstantRegister));

  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result, near_jump);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(dst, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result, near_jump);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result, near_jump);
    }
  }
}


void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create
    // a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result, near_jump);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, check whether the other is negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception. We combine this with a
  // negative zero test (negative zero only happens when dividing zero by
  // a negative number), overshooting a little and going to the slow case
  // if we divide min-value or zero by any negative value.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testq(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero
  // afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    sar(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  // Untag shift amount.
  SmiToInteger32(rcx, src2);
  // Shift amount specified by lower 5 bits, not six as the shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
  } else {
    // src2 was zero and src1 negative.
    j(negative, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // Shift 32 + original rcx & 0x1f.
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  STATIC_ASSERT(kSmiTag == 0);
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure, near_jump);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(result, Operand(result, 0));
  } else {
    Move(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(kScratchRegister, Operand(kScratchRegister, 0));
    push(kScratchRegister);
  } else {
    Push(object);
  }
}


void MacroAssembler::LoadGlobalCell(Register dst,
                                    Handle<JSGlobalPropertyCell> cell) {
  if (dst.is(rax)) {
    load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
  } else {
    movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, Operand(dst, 0));
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::TestBit(const Operand& src, int bits) {
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
}
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          unsigned ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
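
// Pushad and Popad save and restore the general registers around code that
// must not clobber them (e.g. for safepoints); r10 (kScratchRegister),
// r12 (kSmiConstantRegister) and r13 (kRootRegister) are skipped, and the
// stack delta keeps the layout consistent with kNumSafepointRegisters.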
void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must exactly reverse Pushad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
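
// The try-handler chain is threaded through the stack: each handler records
// the previous handler's address, a state word (kind and handler-table
// index), the code object, context and frame pointer, while
// Isolate::kHandlerAddress always points at the innermost handler.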
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
  } else {
    push(rbp);
    push(rsi);
  }

  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movq(ExternalOperand(handler_address), rsp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(ExternalOperand(handler_address));
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  shr(rdx, Immediate(StackHandler::kKindWidth));
  movq(rdx, FieldOperand(rbx, rdx, times_8, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movq(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  pop(rsi);  // Context.
  pop(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testq(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movq(rax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  pop(rdi);  // Code object.
  pop(rdx);  // Offset and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(rsi);
  pop(rbp);

  JumpToHandlerEntry();
}
void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    addq(rsp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmpl(FieldOperand(maybe_number, offset),
       Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister, BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  SmiToInteger32(kScratchRegister, maybe_number);
  cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
        xmm_scratch);
  bind(&done);
}


void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
    ElementsKind kind = map->elements_kind();
    if (IsFastElementsKind(kind)) {
      bool packed = IsFastPackedElementsKind(kind);
      Map* current_map = *map;
      while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
        kind = GetNextMoreGeneralFastElementsKind(kind, packed);
        current_map = current_map->LookupElementsTransitionMap(kind);
        if (!current_map) break;
        j(equal, early_success, Label::kNear);
        Cmp(FieldOperand(obj, HeapObject::kMapOffset),
            Handle<Map>(current_map));
      }
    }
  }
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type,
                              CompareMapMode mode) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success, mode);
  j(not_equal, fail);
  bind(&success);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg,
                                        Register temp_reg) {
  Label done;
  Set(result_reg, 0);
  xorps(temp_xmm_reg, temp_xmm_reg);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  uint64_t one_half = BitCast<uint64_t, double>(0.5);
  Set(temp_reg, one_half);
  movq(temp_xmm_reg, temp_reg);
  addsd(temp_xmm_reg, input_reg);
  cvttsd2si(result_reg, temp_xmm_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movq(descriptors, FieldOperand(map,
                                 Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
  Move(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);
  j(is_smi, &ok, Label::kNear);
  Cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(NegateCondition(is_smi), "Operand is a smi");
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotSmi(const Operand& object) {
  Condition is_smi = CheckSmi(object);
  Assert(is_smi, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) {
  ASSERT(!int32_register.is(kScratchRegister));
  movq(kScratchRegister, 0x100000000l, RelocInfo::NONE);
  cmpq(kScratchRegister, int32_register);
  Assert(above_equal, "32 bit value in register is not zero-extended");
}
void MacroAssembler::AbortIfNotString(Register object) {
  testb(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  movq(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  ASSERT(!src.is(kScratchRegister));
  LoadRoot(kScratchRegister, root_value_index);
  cmpq(src, kScratchRegister);
  Check(equal, message);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    movq(kScratchRegister,
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
    // field).
    TestBit(FieldOperand(kScratchRegister,
                         SharedFunctionInfo::kCompilerHintsOffset),
            SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, Label::kNear);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and simply
  // miss the cache instead. This will allow us to allocate a prototype
  // object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done, Label::kNear);

  // Non-instance prototype: Fetch prototype from constructor field in
  // initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(1);
  ASSERT(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable at the
  // call sites. However, the dst register has to be rcx to follow the
  // calling convention which requires the call type to be in rcx.
  ASSERT(dst.is(rcx));
  if (call_kind == CALL_AS_FUNCTION) {
    LoadSmiConstant(dst, Smi::FromInt(1));
  } else {
    LoadSmiConstant(dst, Smi::FromInt(0));
  }
}
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(rcx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(rcx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  Register dummy = rax;
  InvokePrologue(expected,
                 actual,
                 code,
                 dummy,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper,
                 call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(rcx, call_kind);
      Call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(rcx, call_kind);
      Jump(code, rmode);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx,
                       SharedFunctionInfo::kFormalParameterCountOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and set up the context.
  LoadHeapObject(rdi, function);
  movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // We call indirectly through the code field in the function to allow
  // recompilation to take effect without changing any of the call sites.
  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
}
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      SetCallKind(rcx, call_kind);
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      SetCallKind(rcx, call_kind);
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  Push(Smi::FromInt(type));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (emit_debug_code()) {
    movq(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset),
         kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movq(r14, rax);  // Back up rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
}


void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    ASSERT(is_int8(kFrameAlignment));
    and_(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveApiExitFrame() {
  movq(rsp, rbp);
  pop(rbp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  movq(rsi, context_operand);
#ifdef DEBUG
  movq(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movq(c_entry_fp_operand, Immediate(0));
}
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens. Check that the security token in the calling
  // global object is compatible with the security token in the receiving
  // global object.

  // Check the context is a global context.
  if (emit_debug_code()) {
    // Preserve the original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to the global context map.
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
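
// Computes the seeded integer hash used by the number dictionary. The
// sequence of shifts, adds and xors below is a standard avalanching integer
// hash applied to the key after combining it with the heap's hash seed; it
// must be kept in sync with ComputeIntegerHash in utils.h.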
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with the seed.
  xorl(r0, scratch);

  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(scratch);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
}
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // r0 holds the untagged key on entry and the hash once computed; r1 holds
  // the capacity mask; r2 holds the index into the dictionary.
  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movq(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpq(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(new_space_allocation_top);
    cmpq(result, top_operand);
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep the address in scratch until the call to
  // UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, new_space_allocation_top);
    movq(result, Operand(scratch, 0));
  } else {
    Load(result, new_space_allocation_top);
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    testq(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movq(Operand(scratch, 0), result_end);
  } else {
    Store(new_space_allocation_top, result_end);
  }
}
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movq(top_reg, result);
  }
  addq(top_reg, Immediate(object_size));
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      subq(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subq(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    // Tag the result if requested.
    addq(result, Immediate(kHeapObjectTag));
  }
}
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count * element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    movq(result_end, object_size);
  }
  addq(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
  cmpq(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addq(result, Immediate(kHeapObjectTag));
  }
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  cmpq(object, top_operand);
  Check(below, "Undo allocation of non allocated memory");
#endif
  movq(top_operand, object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch,
                     no_reg,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  ASSERT(kCharSize == 1);
  addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  and_(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subq(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movq(FieldOperand(result, String::kLengthOffset), scratch1);
  movq(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the sliced string in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the sliced string in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
  movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
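
// CopyBytes copies 'length' bytes from source to destination. Long inputs
// are copied in 8-byte chunks with rep movs, with the unaligned tail handled
// by one overlapping 8-byte move; short inputs take a simple byte loop.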
3951 void MacroAssembler::CopyBytes(Register destination,
3957 if (FLAG_debug_code) {
3958 cmpl(length, Immediate(min_length));
3961 Label loop, done, short_string, short_loop;
3963 const int kLongStringLimit = 20;
3964 if (min_length <= kLongStringLimit) {
3965 cmpl(length, Immediate(kLongStringLimit));
3976 movq(scratch, length);
3977 shrl(length, Immediate(3));
3980 andl(scratch, Immediate(0x7));
3981 movq(length, Operand(source, scratch,
times_1, -8));
3982 movq(Operand(destination, scratch,
times_1, -8), length);
3983 addq(destination, scratch);
3985 if (min_length <= kLongStringLimit) {
3988 bind(&short_string);
3989 if (min_length == 0) {
3990 testl(length, length);
3993 lea(scratch, Operand(destination, length,
times_1, 0));
3996 movb(length, Operand(source, 0));
3997 movb(Operand(destination, 0), length);
4000 cmpq(destination, scratch);
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  movq(Operand(start_offset, 0), filler);
  addq(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmpq(start_offset, end_offset);
  j(less, &loop);
}
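
// A hypothetical call site, for illustration: after allocating a JSObject,
// its in-object fields can be cleared to undefined before the object
// becomes visible to the GC (`instance_size` here is an assumed local):
//
//   LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
//   lea(rcx, FieldOperand(rax, JSObject::kHeaderSize));
//   lea(rbx, FieldOperand(rax, instance_size));
//   InitializeFieldsWithFiller(rcx, rbx, rdx);
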
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movq(dst, rsi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, "Variable resolved to with context.");
  }
}
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  movq(scratch, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the function's map is the same as the expected cached map.
  movq(scratch, Operand(scratch,
                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
  int offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmpq(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  movq(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  movq(map_out, FieldOperand(function_in,
                             JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif
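
// The Windows x64 ABI passes the first four arguments in registers and
// makes the caller reserve shadow stack space for all of them; the System V
// AMD64 ABI used elsewhere passes the first six integer arguments in
// registers with no shadow space.  This constant feeds the stack-slot math
// in ArgumentStackSlotsForCFunctionCall below.
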
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  movq(function, Operand(function, Context::SlotOffset(index)));
}
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  ASSERT(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}
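
// Worked example: for num_arguments == 5, Windows reserves max(5, 4) == 5
// slots (shadow space always covers the four register arguments), while
// System V reserves 0 slots, since all five arguments travel in registers
// (5 < 6).
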
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = OS::ActivationFrameAlignment();
  ASSERT(frame_alignment != 0);
  ASSERT(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movq(kScratchRegister, rsp);
  ASSERT(IsPowerOf2(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
  and_(rsp, Immediate(-frame_alignment));
  movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
       kScratchRegister);
}
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  ASSERT(OS::ActivationFrameAlignment() != 0);
  ASSERT(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
}
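
// Illustrative calling sequence (argument registers per the System V ABI;
// the runtime entry named here is a placeholder, not a real reference):
//
//   PrepareCallCFunction(2);
//   LoadAddress(rdi, ExternalReference::isolate_address());  // Argument 0.
//   movq(rsi, some_value);                                   // Argument 1.
//   CallCFunction(ExternalReference::some_runtime_entry, 2);
//
// PrepareCallCFunction stashes the old rsp in the slot that CallCFunction's
// final movq restores it from, so the two must always be used as a pair.
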
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch.  The size is adjusted with kGap so that the assembler can emit
  // size bytes of instructions without triggering buffer growth.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
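
// Illustrative RAII use (`pc` is a placeholder byte* into existing code):
// the destructor flushes the instruction cache and asserts that exactly
// `size` bytes were emitted.
//
//   {  // Patch two bytes at pc with int3 breakpoints.
//     CodePatcher patcher(pc, 2);
//     patcher.masm()->int3();
//     patcher.masm()->int3();
//   }  // I-cache flushed here.
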
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movq(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
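
// Masking an object's address with ~kPageAlignmentMask yields the start of
// its page, where the MemoryChunk header (and thus the flags word) lives;
// that is what makes the testb/testl against kFlagsOffset valid for any
// object on the page.
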
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first
  // bit and a 0 at all other positions, including the position of the
  // second bit.
  movq(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  // Note that we are using a 4-byte aligned 8-byte load.
  and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpq(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movq(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object
  // containing no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movq(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movq(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  and_(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addq(bitmap_reg, rcx);
  movq(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
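
// Derivation of the addressing above: each pointer-aligned word on the page
// owns one mark bit and cells hold 2^kBitsPerCellLog2 bits, so the cell
// index is (offset_on_page >> kPointerSizeLog2) >> kBitsPerCellLog2, and
// converting it back to a byte offset multiplies by 2^kBytesPerCellLog2 --
// hence the single combined shift.  The low kBitsPerCellLog2 bits of the
// word index, left in rcx, select the bit within the cell via shl_cl.
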
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    addq(mask_scratch, mask_scratch);
    testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and non-cons strings.
  Register map = rcx;  // Deliberately aliased with length below.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movq(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movq(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object
  // containing no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);

  // It's a non-indirect (non-cons and non-slice) string.  If it's external,
  // the length is just ExternalString::kSize; otherwise it's sequential and
  // the byte length is computed from the character count below.  External
  // strings are the only ones with the kExternalStringTag bit set.
  Label not_external;
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movq(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  addq(length, Immediate(0x04));
  // Value is now either 4 (if ASCII) or 8 (if UC16), i.e. the char size
  // shifted by two.  Multiply by the smi-tagged character count and shift
  // everything back down.
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addq(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Register empty_descriptor_array_value = r9;
  LoadRoot(empty_descriptor_array_value,
           Heap::kEmptyDescriptorArrayRootIndex);
  movq(rcx, rax);
  bind(&next);

  // Check that there are no elements.  Register rcx contains the
  // current JS object we've reached through the prototype chain.
  cmpq(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in rbx for the subsequent
  // prototype load.
  movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
  movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
  JumpIfSmi(rdx, call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (rdx).  This is the case if the next enumeration
  // index field does not contain a smi.
  movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
  JumpIfSmi(rdx, call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  cmpq(rcx, rax);
  j(equal, &check_prototype, Label::kNear);
  movq(rdx,
       FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  cmpq(rdx, empty_fixed_array_value);
  j(not_equal, call_runtime);

  // Load the prototype from the map and loop if non-null.
  bind(&check_prototype);
  movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpq(rcx, null_value);
  j(not_equal, &next);
}
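
// CheckEnumCache is the fast-path guard for for-in: it walks the prototype
// chain (starting from the receiver in rax) and bails to the runtime unless
// every object on the chain has no elements and a populated enum cache, in
// which case the cached key array can be enumerated directly.
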
} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64