#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
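// Tests the new-space bits in the page flags of the page holding |object|
// and jumps to |condition_met| when the comparison with |cc| succeeds.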
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b: both flags fit in the low byte.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
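// Records the address of a slot in the store buffer; when the buffer
// overflows, the StoreBufferOverflowStub is called to compact it.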
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
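// Converts the double in |input_reg| to an integer and clamps it to the
// range [0, 255]; out-of-range values saturate and NaN is mapped to zero.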
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  pxor(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmp(result_reg, Immediate(0x80000000));
  j(equal, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(above, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
static double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  movdbl(scratch,
         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
  cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}
void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed: stores of smis never
  // need one.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address.  Multiply a smi by 2
  // to get an offset into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber the input registers under the debug-code flag to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed: stores of smis never
  // need one.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber the input registers under the debug-code flag to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // A single check of the map's page's interesting flag suffices; this
  // relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Delay the initialization of |address| and |value| until it is known
  // that they are actually needed by the stub.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber the scratch registers under the debug-code flag to provoke
  // errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
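// Full write barrier: skips the barrier for smis and for uninteresting
// pages, otherwise records the slot by calling the RecordWriteStub.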
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed: stores of smis and
  // stores into young gen never need one.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(
      object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber the input registers under the debug-code flag to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
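// Immediates wider than kMaxImmediateBits are "unsafe": SafeSet and SafePush
// mask them with the random JIT cookie so user-controlled constants never
// appear verbatim in generated code.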
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (x.rmode_ != RelocInfo::NONE) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // See ROOT_ACCESSOR macro in factory.h.
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
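// Stores a smi or heap number into a FixedDoubleArray slot, canonicalizing
// NaNs on the way; uses SSE2 when available and requested, x87 otherwise.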
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If the fraction is not zero, it is NaN,
  // otherwise it is Infinity and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi: convert to a double and store, preserving the original.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope fscope(SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize));
  }
  bind(&done);
}
void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
    ElementsKind kind = map->elements_kind();
    if (IsFastElementsKind(kind)) {
      bool packed = IsFastPackedElementsKind(kind);
      Map* current_map = *map;
      while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
        kind = GetNextMoreGeneralFastElementsKind(kind, packed);
        current_map = current_map->LookupElementsTransitionMap(kind);
        if (!current_map) break;
        j(equal, early_success, Label::kNear);
        cmp(FieldOperand(obj, HeapObject::kMapOffset),
            Handle<Map>(current_map));
      }
    }
  }
}
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type,
                              CompareMapMode mode) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success, mode);
  j(not_equal, fail);
  bind(&success);
}
void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, "Operand not a number");
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, "Operand is not a smi");
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, "Operand is a smi and not a string");
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, "Operand is not a string");
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, "Operand is a smi");
  }
}
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  push(ebp);
  mov(ebp, esp);

  // Reserve room for the entry stack pointer and push the code object.
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}
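// Stack-handler layout, from the top of the stack downwards: next handler,
// code object, state, context, frame pointer (see the STATIC_ASSERTs below).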
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // Build the handler from the bottom up: first the frame pointer and
  // context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame, so save NULL for ebp.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  push(Immediate(CodeObject()));

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.  The
  // handler kind and the zero-ness of ebp and esi agree, so either register
  // could be tested here.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}
void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to native_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->native_context_map());
    Check(equal, "JSGlobalObject::native_context should be a native context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens: check that the security token in the calling
  // global object is compatible with the security token in the receiving
  // global object.  Preserve holder_reg so it can be used as a temporary.
  push(holder_reg);
  mov(holder_reg,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->native_context_map());
    Check(equal, "JSGlobalObject::native_context should be a native context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //   elements - holds the slow-case elements of the receiver.
  //   key      - holds the smi key on entry.
  //   result   - holds the result on exit if the load succeeds.
  //   r0, r1, r2 - used as scratch registers.
  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
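// Inline new-space allocation protocol: load the current allocation top,
// bump it by the object size, compare against the allocation limit, and
// write the new top back on success.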
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(scratch, Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count * element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register object_size is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map; the other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // The short-string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function, keep it
  // aligned for the rep_movs by copying the odd bytes at the end first.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to the miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and simply
  // miss the cache instead; the prototype can be allocated on demand.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  bind(&done);
}
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The constants for the maximum number of digits of an array index cached
  // in the hash field and the number of bits reserved for it must not
  // conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in |index|; kArrayIndexValueMask has zeros
  // in the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
// If true, a Handle<T> passed by value is passed and returned by using the
// location_ field directly.  If false, it is passed and returned as a
// pointer to a handle.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}


void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: the return value and a pointer to it.
    EnterApiExitFrame(argc + 2);

    // Slot 0 holds the pointer to the output slot at argc + 1; this extra
    // "argument" must be popped explicitly after the call returns.
    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);  // output.
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));  // out cell.
    }
  }
}
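// Calls an API entry point, restores the enclosing HandleScope, and
// returns; empty result handles, scheduled exceptions, and handle-scope
// extensions each take a slow path below.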
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved the pointer to the output slot in esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, eax);
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one).  Restore
  // the previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);

#if ENABLE_EXTRA_CHECKS
  // Check if the result is a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort("API call returned invalid object");

  bind(&ok);
#endif

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&empty_handle);
  // The returned handle was empty; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // The dst register is taken to make the code more readable at the call
  // sites; the calling convention requires the call kind in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi, i.e. Smi::FromInt(1).
    mov(dst, Immediate(Smi::FromInt(1)));
  } else {
    // Set to smi zero.
    xor_(dst, dst);
  }
}
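// Emits the expected/actual argument-count check shared by all invoke
// paths; on a mismatch, control is routed through the ArgumentsAdaptor
// trampoline.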
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that don't want
        // that done; make it look like a match.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the case
      // when invoking function values without going through the IC
      // mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both are in (different) registers. This is the case when invoking
      // functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and set up the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // Call indirectly through the code field in the function so that
  // recompilation takes effect without changing any call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Fake a parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // The slot is in the current function context.  Move it into the
    // destination register in case we store into it: the write barrier
    // cannot be allowed to destroy the context in esi.
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain;
  // a variable in such a scope should have slot type LOOKUP, not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the map matches the expected cached array map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding, which means
  // that the lowest encoding ends up closest to the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    push(Immediate(object));
  }
}
void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems, but msg
  // is not guaranteed to be aligned properly.  Instead, pass an aligned
  // pointer that is a proper v8 smi, plus the alignment difference as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // Claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kSeqStringTag | kAsciiStringTag;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
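// C calls below follow the cdecl convention: arguments go on the stack and
// the caller removes them after the call returns.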
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch.  The size is adjusted with kGap so the assembler can generate
  // size bytes of instructions without failing buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies on maps
  // never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
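// Computes the marking-bitmap cell address in |bitmap_reg| and the bit mask
// for |addr_reg| in |mask_reg|; clobbers ecx.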
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there, we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for the impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl; may overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  Check whether it is data that doesn't need scanning;
  // currently only heap numbers and strings qualify.
  Register map = ecx;  // Usually only live during this function.
  Register length = ecx;  // Usually only live during this function.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, FACTORY->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings: a non-indirect (non-cons, non-sliced) string
  // contains no GC pointers.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // If external, the length is just ExternalString::kSize; otherwise it is
  // sequential and the length depends on the encoding.
  Label not_external;
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // length is now either 4 (if ASCII) or 8 (if UC16), i.e. the char size
  // shifted by 2; multiplying the smi length by it cannot overflow 32 bits.
  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqAsciiString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Since we know the object is
  // white, we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements.  Register ecx holds the current JS
  // object reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32