#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
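
// InNewSpace masks the object's address down to the start of its page
// (MemoryChunk) and tests the IN_FROM_SPACE / IN_TO_SPACE flag bits, so the
// caller's condition code decides whether to branch when the object is (or
// is not) in new space.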

void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* condition_met,
                                Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
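
// RememberedSetHelper records 'addr' in the store buffer and bumps the
// buffer top. When the overflow bit becomes set it calls
// StoreBufferOverflowStub; depending on 'and_then' it either returns to the
// caller (kReturnAtEnd) or falls through (kFallThroughAtEnd).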

void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (FLAG_debug_code) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
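
// ClampDoubleToUint8 clamps a double to [0, 255]: anything below zero yields
// 0, in-range values are rounded by adding 0.5 and truncating with
// cvttsd2si, and any result outside the low byte (detected via 0xFFFFFF00)
// is forced to 255.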

void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  ExternalReference zero_ref = ExternalReference::address_of_zero();
  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  ExternalReference half_ref = ExternalReference::address_of_one_half();
  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
  addsd(scratch_reg, input_reg);
  cvttsd2si(result_reg, Operand(scratch_reg));
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}

void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}

void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // The offset is relative to the tagged object, so it must be
  // pointer-aligned after removing the tag.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  // A single check of the map's page flags suffices; maps are never
  // allocated in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Initialize |address| and |value| only now that the stub is known to be
  // needed.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (FLAG_debug_code) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip the barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
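
// The RecordWrite* helpers above implement the generational/incremental
// write barrier: the barrier is skipped when the stored value is a smi or
// when the page flags show that neither the target nor the host page is
// interesting, and otherwise RecordWriteStub is called. With debug code
// enabled the clobbered registers are zapped so stale uses fail fast.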

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}

bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (x.rmode_ != RelocInfo::NONE) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
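
// SafeSet/SafePush defend against JIT spraying: a "large" user-controlled
// immediate is first emitted XORed with the per-isolate jit cookie and then
// decoded with a second XOR, so the attacker-chosen bit pattern never
// appears verbatim in the generated code stream.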

void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}

void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}

void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}

void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If the fraction is not zero, it's NaN,
  // otherwise it's an Infinity and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan, Label::kNear);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store, preserving the original.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope fscope(SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  bind(&done);
}
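
// CompareMap/CheckMap below compare an object's map against an expected map;
// with ALLOW_ELEMENT_TRANSITION_MAPS the comparison also accepts the maps
// reachable through fast elements-kind transitions, so callers do not miss
// just because an array transitioned (e.g. from smi-only to double
// elements).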

void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
    ElementsKind kind = map->elements_kind();
    if (IsFastElementsKind(kind)) {
      bool packed = IsFastPackedElementsKind(kind);
      Map* current_map = *map;
      while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
        kind = GetNextMoreGeneralFastElementsKind(kind, packed);
        current_map = current_map->LookupElementsTransitionMap(kind);
        if (!current_map) break;
        j(equal, early_success, Label::kNear);
        cmp(FieldOperand(obj, HeapObject::kMapOffset),
            Handle<Map>(current_map));
      }
    }
  }
}

void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type,
                              CompareMapMode mode) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success, mode);
  j(not_equal, fail);
  bind(&success);
}

void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}

Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}

void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}

void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  JumpIfSmi(object, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}

void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}

void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  push(ebp);
  mov(ebp, esp);

  // Reserve room for the entry stack pointer and push the code object.
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}

void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}

void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}

void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}

void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}

void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // Build the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame, so save NULL for ebp;
    // code throwing an exception checks ebp before dereferencing it.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  push(Immediate(CodeObject()));

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
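
// The five pushes above mirror the StackHandlerConstants offsets asserted at
// the top of PushTryHandler: next handler, code object, encoded state,
// context and frame pointer, with the handler list threaded through the
// per-isolate handler address.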

void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}

void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}

void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
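
// Throw unwinds to the innermost handler recorded by PushTryHandler and
// jumps to its entry via JumpToHandlerEntry; ThrowUncatchable additionally
// walks the handler chain until it reaches the top JS_ENTRY handler, so the
// exception cannot be caught by intervening JS try blocks.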

void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.  Save holder_reg on the stack so it can be used
  // as a temporary register.
  push(holder_reg);
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(), not the
    // factory root array.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  // Check that the security token in the calling global object is compatible
  // with the security token in the receiving global object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}

void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // Compute the hash code from the untagged key.  This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}

void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  Label done;

  GetNumberHash(r0, r1);

  // Compute the capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
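
// The dictionary probe above is quadratic: the i-th probe looks at
// (hash + i + i*i) & capacity_mask, and each entry occupies three pointers
// (key, value, details), hence the multiply-by-3 via lea and the fixed
// details/value offsets relative to kElementsStartOffset.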

void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if the allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(scratch, Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}

void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}

void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}

void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // element_count is not modified by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count * element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}

void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
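
// All three AllocateInNewSpace overloads follow the same bump-pointer
// pattern: load the current allocation top, add the requested size, bail to
// gc_required on carry or when the new top exceeds the allocation limit,
// write the new top back, and optionally tag the result as a heap object.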

void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}

void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize, times_1, scratch1,
                     result, scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize, times_1, scratch1,
                     result, scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length), result, scratch1,
                     scratch2, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the object in new space.
  AllocateInNewSpace(ConsString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the object in new space.
  AllocateInNewSpace(ConsString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the object in new space.
  AllocateInNewSpace(SlicedString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the object in new space.
  AllocateInNewSpace(SlicedString::kSize, result, scratch1, scratch2,
                     gc_required, TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}

void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
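
// CopyBytes uses rep_movs (four bytes at a time) for longer ranges, copying
// the trailing unaligned bytes up front so the bulk copy stays aligned, and
// falls back to a simple byte loop for lengths of ten or less.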

void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}

void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}

void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}

void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to the miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}

void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}

void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in 'index'.  kArrayIndexValueMask has zeros
  // in the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}

void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // Pass the number of arguments in eax and the context in esi.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}

void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // Pass the number of arguments in eax; the callee expects the context
  // in esi.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}

// Whether a Handle<T> returned by value from an API function with cdecl
// calling convention is returned directly in eax (true) or through a
// caller-allocated out parameter passed as an implicit first argument
// (false).
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}

void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: the return value and a pointer to it.
    EnterApiExitFrame(argc + 2);

    // The slot above the arguments is the output slot; pass a pointer to it
    // as the implicit first argument.
    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}

void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved a pointer to the output slot into
    // callee-save register esi.  Dereference it to get to the result.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, eax);
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one).  Restore
  // the previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_handle);
  // The result handle was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // The HandleScope limit has changed.  Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}

void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // The calling convention requires the call kind in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi, i.e. a smi with value 1.
    Set(dst, Immediate(Smi::FromInt(1)));
  } else {
    // Set to smi zero.
    Set(dst, Immediate(Smi::FromInt(0)));
  }
}

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done.  Skip adaption code by making it look
        // like we have a match between expected and actual arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate.  This is the
      // case when we invoke function values without going through the IC.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both are in (different) registers, as when invoking via call/apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
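
// InvokePrologue compares the expected and actual argument counts; when they
// can differ at runtime it routes the call through the builtin
// ArgumentsAdaptorTrampoline, which adapts the frame before reaching the
// callee, and only a definite compile-time match skips that detour.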

void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and set up the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}

void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided arguments
  // matches the expected number of arguments.  Fake a parameter count to
  // avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}

void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // The slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should never find a with context by walking the context chain.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}

void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}

void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}

void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    Push(object);
  }
}

void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}

void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}

void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}

void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems, but msg
  // is not guaranteed to be aligned.  Instead, pass an aligned pointer that
  // is a proper v8 smi, plus the alignment difference as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
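
// Abort smuggles the message to the runtime as two smi-compatible values: an
// aligned pointer p0 and the difference p1 - p0 encoded as a smi, so the GC
// never sees a raw, possibly unaligned C string pointer on the stack.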

void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}

void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}

void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}

void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave the bits from both instance types and compare them in one
  // check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}

void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}

bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}

CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch.  The size is adjusted with kGap so the assembler can generate
  // 'size' bytes of instructions without hitting buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

void MacroAssembler::CheckPageFlag(Register object,
                                   Register scratch,
                                   int mask,
                                   Condition cc,
                                   Label* condition_met,
                                   Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}

void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}

void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there, we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for the impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // Shift left 1; may overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  Check whether it is data that doesn't need scanning.
  // Currently only heap numbers and strings qualify.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, FACTORY->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.  A string that is not indirect (not a cons or slice)
  // contains no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's the header size plus length * (1 or 2) bytes.
  Label not_external;
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value is now either 4 (ASCII) or 8 (UC16), i.e. the char size shifted
  // by 2.  Multiplying the smi-tagged length by this cannot overflow 32 bits.
  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqAsciiString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // it is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (FLAG_debug_code) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}

void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next;
  mov(ecx, eax);
  bind(&next);

  // Check that there are no elements.  Register ecx contains the
  // current JS object we've reached through the prototype chain.
  cmp(FieldOperand(ecx, JSObject::kElementsOffset),
      isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  // Check that the instance descriptors are not empty so that we can check
  // for an enum cache.  Leave the map in ebx for the prototype load below.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
  JumpIfSmi(edx, call_runtime);

  // Check that there is an enum cache in the non-empty instance descriptors
  // (edx).  This is the case if the next enumeration index field does not
  // contain a smi.
  mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  JumpIfSmi(edx, call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  cmp(ecx, eax);
  j(equal, &check_prototype, Label::kNear);
  mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  cmp(edx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  // Load the prototype from the map and loop if it is not null.
  bind(&check_prototype);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32