#if V8_TARGET_ARCH_IA32
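// Implementation of MacroAssembler for IA-32: the code generator used by
// V8's stubs and builtins. It layers higher-level operations (root access,
// write barriers, allocation, frame and handler management) on top of the
// raw Assembler.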
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    mov(dst, src);
  }
}
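// Roots that can be "treated as constant" are immortal, immovable objects,
// so handles to them may be embedded directly in code; all other roots must
// be loaded indirectly through the isolate's roots array.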
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
  } else {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(destination, Immediate(index));
    mov(destination, Operand::StaticArray(destination,
                                          times_pointer_size,
                                          roots_array_start));
  }
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
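// New-space checks work by masking an object's address down to its page
// start and testing the page's flag bits, so no heap boundary constants
// need to be embedded in the code.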
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check which space the page belongs to; the flag bits fit in one byte.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
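// Record |addr| in the store buffer: the buffer collects addresses of slots
// that may hold pointers into new-space so the next scavenge can treat them
// as roots. When the buffer overflows, a stub is called to process it.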
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load the store buffer top pointer.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store the slot address in the buffer and bump the top pointer.
  mov(Operand(scratch, 0), addr);
  add(scratch, Immediate(kPointerSize));
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call the stub if the buffer is full.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
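// Clamp the double in |input_reg| to the uint8 range [0, 255] for clamped
// array element stores: overflowing values saturate to 0 or 255, and NaN
// maps to zero.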
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(input_reg, result_reg, offset, true);
  call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(Operand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}


void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(Operand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(Operand(esp, 0));
  fild_s(Operand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // Reload the value as a float to check for minus zero.
    sub(esp, Immediate(kPointerSize));
    fst_s(Operand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(not_zero, conversion_failed, dst);
  }
  bind(&done);
}


void MacroAssembler::DoubleToI(Register result_reg,
                               XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* conversion_failed,
                               Label::Distance dst) {
  ASSERT(!input_reg.is(scratch));
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, conversion_failed, dst);
  j(parity_even, conversion_failed, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back equals the original, so the result is zero
    // only if the input was +0 or -0. Check the sign via movmskpd.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    and_(result_reg, 1);
    j(not_zero, conversion_failed, dst);
    bind(&done);
  }
}
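// Truncate the double in a heap number to an int32, taking the fastest path
// the CPU supports: SSE3 fisttp, SSE2 cvttsd2si, or the generic slow stub.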
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    // Load the x87 register with the heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Check that the value fits in 64 bits by inspecting the exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for a 64-bit answer and do the truncating conversion.
    sub(esp, Immediate(kDoubleSize));
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(esp, Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore the number from the fpu stack.
      sub(esp, Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);
      SlowTruncateToI(result_reg, input_reg);
    }
  } else if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope scope(this, SSE2);
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // 0x80000000 is a valid result only if the input was exactly kMinInt.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore the number from the double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(Operand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }
  bind(&done);
}
void MacroAssembler::TaggedToI(Register result_reg,
                               Register input_reg,
                               XMMRegister temp,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision) {
  Label done;

  cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, lost_precision, Label::kNear);

  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
    CpuFeatureScope scope(this, SSE2);

    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    Cvtsi2sd(temp, Operand(result_reg));
    ucomisd(xmm0, temp);
    RecordComment("Deferred TaggedToI: lost precision");
    j(not_equal, lost_precision, Label::kNear);
    RecordComment("Deferred TaggedToI: NaN");
    j(parity_even, lost_precision, Label::kNear);
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
      test(result_reg, Operand(result_reg));
      j(not_zero, &done, Label::kNear);
      movmskpd(result_reg, xmm0);
      and_(result_reg, 1);
      RecordComment("Deferred TaggedToI: minus zero");
      j(not_zero, lost_precision, Label::kNear);
    }
  } else {
    Label lost_precision_pop, zero_check;
    Label* lost_precision_int = (minus_zero_mode == FAIL_ON_MINUS_ZERO)
        ? &lost_precision_pop : lost_precision;
    sub(esp, Immediate(kPointerSize));
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) fld(0);
    fist_s(Operand(esp, 0));
    fild_s(Operand(esp, 0));
    FCmp();
    pop(result_reg);
    j(not_equal, lost_precision_int, Label::kNear);
    j(parity_even, lost_precision_int, Label::kNear);  // NaN.
    if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
      test(result_reg, Operand(result_reg));
      j(zero, &zero_check, Label::kNear);
      fstp(0);
      jmp(&done, Label::kNear);

      bind(&zero_check);
      // Reload the value as a float to distinguish -0 from +0.
      sub(esp, Immediate(kPointerSize));
      fstp_s(Operand(esp, 0));
      pop(result_reg);
      test(result_reg, Operand(result_reg));
      j(zero, &done, Label::kNear);
      jmp(lost_precision, Label::kNear);

      bind(&lost_precision_pop);
      fstp(0);
      jmp(lost_precision, Label::kNear);
    }
  }
  bind(&done);
}
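// Convert an untagged uint32 to a double: the value is converted as a signed
// int32 and, if it was negative, corrected by adding 2^32 (the uint32 bias).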
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  movsd(scratch, Operand::StaticVariable(uint32_bias));
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}


void MacroAssembler::LoadUint32NoSSE2(Register src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
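// The RecordWrite* family implements the write barrier: after a pointer
// store, record the slot in the remembered set and notify the incremental
// marker. Smi stores never need a barrier. Under --debug-code the clobbered
// input registers are overwritten with kZapValue to provoke errors early.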
void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // Skip the barrier if writing a smi.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: compute the slot address from the smi element index.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers under --debug-code.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // Skip the barrier if writing a smi.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // The offset is relative to the start of the object; it must be
  // pointer-aligned.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers under --debug-code.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Count the write barriers emitted and taken.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Maps are never in new space, so a single page-flag check suffices.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  // ...
  bind(&done);

  // Clobber clobbered input registers under --debug-code.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Count the write barriers emitted and taken.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Skip the barrier for smi stores and for stores into untracked pages.
  Label done;
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers under --debug-code.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
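// Large immediates are "unsafe" because an attacker who can influence them
// could hide useful instruction bytes inside the code stream (JIT spraying).
// SafeMove/SafePush XOR such immediates with a per-process JIT cookie so the
// raw value never appears in generated code.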
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
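// For example (values hypothetical): with jit_cookie() == 0xA5A5A5A5,
// SafePush(Immediate(0x12345678)) emits
//   push (0x12345678 ^ 0xA5A5A5A5)
//   xor [esp], 0xA5A5A5A5
// so the attacker-chosen constant never appears verbatim in the code stream.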
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
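// Store a number (smi or heap number) into a FixedDoubleArray element,
// canonicalizing NaNs so the hole NaN bit pattern can never be stored by
// accident.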
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movsd(FieldOperand(elements, key, times_4,
                       FixedDoubleArray::kHeaderSize - elements_offset),
          scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If the fraction is not zero it's NaN,
  // otherwise it's Infinity and the non-NaN path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi: convert to a double and store, preserving the original.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope fscope(this, SSE2);
    Cvtsi2sd(scratch2, scratch1);
    movsd(FieldOperand(elements, key, times_4,
                       FixedDoubleArray::kHeaderSize - elements_offset),
          scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  bind(&done);
}
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);
  bind(&fail);
}
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    // ...
  }
}
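// The Assert* helpers below compile to nothing unless --debug-code is on;
// with it, they trap with a descriptive bailout reason when the predicate
// fails.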
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
  // ...
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (isolate()->IsCodePreAgingActive()) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    // ... (normal frame setup)
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  // ...
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
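// A stack handler is five words: next handler, code object, state (kind and
// handler index), context, and frame pointer; the STATIC_ASSERTs below pin
// this layout.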
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if the layout is not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // Build the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame, so save NULL for ebp
    // and a smi zero for the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  push(CodeObject());

  // Link the current handler as the next handler and set this new handler as
  // the current one.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}
void MacroAssembler::Throw(Register value) {
  // Adjust this code if the handler layout is not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state; compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame. Note
  // that the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if the handler layout is not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to the next handler past the top ENTRY
  // handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state; compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
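// Security check: verify that the current lexical context and the holder's
// global proxy share either the same native context or the same security
// token; otherwise fall through to |miss|.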
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load the current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Check that the security token in the calling global object is compatible
  // with the security token in the receiving global object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor the original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // Compute the hash code from the untagged key (must be kept in sync with
  // ComputeIntegerHash in utils.h).
  // ...
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // ...
}
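// For reference, a C++ sketch of the shift-and-xor integer hash this
// assembly must stay in sync with (the authoritative ComputeIntegerHash
// lives in utils.h):
//   uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {
//     uint32_t hash = key ^ seed;
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash;
//   }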
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  Label done;

  GetNumberHash(r0, r1);

  // Compute the capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // Convert smi to int.
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    mov(r2, r0);
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3.

    // Check if the key matches.
    cmp(key, FieldOperand(elements, r2, times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if the allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move the address of the new object to result; use scratch if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update the new top; use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
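// The three Allocate overloads below carve objects out of the current
// allocation space: load the top pointer, bump it by the requested size,
// compare against the limit, and jump to |gc_required| on overflow.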
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top
  // is safe in new-space because the heap limit is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate the new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag the result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation (see the comment in the overload above).
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate the new top and bail out if space is exhausted. A smi element
  // count already carries a factor of two, so step the scale factor down.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    ASSERT(element_size >= times_2);
    ASSERT(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load the address of the new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation (see the comment in the first overload).
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate the new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate a heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters while observing
  // object alignment.
  // ...
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters while observing
  // object alignment.
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate an ASCII string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate an ASCII string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map; the other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  Label allocate_new_space, install_map;

  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(isolate());

  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
  j(zero, &allocate_new_space);

  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           static_cast<AllocationFlags>(TAG_OBJECT |
                                        PRETENURE_OLD_POINTER_SPACE));
  jmp(&install_map);

  bind(&allocate_new_space);
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  bind(&install_map);
  // Set the map; the other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map; the other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map; the other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
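// Copy |length| bytes from |source| to |destination|: lengths >= 4 are
// copied a word at a time (the unaligned tail is handled by an overlapping
// 4-byte copy up front), short strings fall back to a byte loop.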
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function, keep
  // source aligned for the rep_movs call by copying the odd bytes at the
  // end of the range first.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}


void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to the miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, miss the cache instead so
  // a prototype object can be allocated on demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch it from the constructor field in the
  // initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  bind(&done);
}
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));
  call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The constants for the maximum number of digits of an array index cached
  // in the hash field must not conflict with the bits reserved for it.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  and_(hash, String::kArrayIndexValueMask);
  // ...
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is constant,
  // check that the actual number of arguments matches the expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // Pass the argument count and the runtime entry in registers.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles
                                                   : kDontSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  Move(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments, result_size);
}


Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}
void MacroAssembler::PrepareCallApiFunction(int argc) {
  EnterApiExitFrame(argc);
  if (emit_debug_code()) {
    mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
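// Call an API function (optionally through a profiling thunk), then manage
// the handle scope: restore next/limit, decrement the level, check for
// scheduled exceptions, and delete extensions if the limit changed during
// the call.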
void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    Address thunk_address,
    Operand thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate());
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate());

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()),
                  1);
    PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  bool* is_profiling_flag =
      isolate()->cpu_profiler()->is_profiling_address();
  mov(eax, Immediate(reinterpret_cast<Address>(is_profiling_flag)));
  cmpb(Operand(eax, 0), 0);
  j(zero, &profiler_disabled);

  // The additional parameter is the address of the actual function; call the
  // API function through the profiling thunk.
  mov(thunk_last_arg, function_address);
  mov(eax, Immediate(thunk_address));
  call(eax);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the API function directly.
  call(function_address);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()),
                  1);
    PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // No more valid handles (the result handle was the last one); restore the
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check that the returned value is a valid API return value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    mov(esi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kHiddenPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // The HandleScope limit has changed; delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
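// Decide how to dispatch based on the expected and actual argument counts:
// invoke the code directly when they match (or the callee does not adapt
// arguments), otherwise go through the ArgumentsAdaptorTrampoline.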
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that don't want
        // that done; make it look like the counts match.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the case
      // when invoking function values without going through the IC.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both are in (different) registers. This is the case for call/apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx,
      FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided arguments
  // matches the expected number; fake a parameter count to avoid emitting
  // the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper);
}
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // The slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the native context from the current context.
  mov(scratch,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function,
      FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map; global functions always have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding, which means
  // the lowest encoding ends up closest to the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Make sure the floating point stack is either empty or has depth items.
  ASSERT(depth <= 7);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();
  pop(eax);
}
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of a 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::Move(XMMRegister dst, double val) {
  CpuFeatureScope scope(this, SSE2);
  uint64_t int_val = BitCast<uint64_t, double>(val);
  if (int_val == 0) {
    xorps(dst, dst);
  } else {
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt);
    push(Immediate(upper));
    push(Immediate(lower));
    movsd(dst, Operand(esp, 0));
    add(esp, Immediate(kDoubleSize));
  }
}
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)),
        Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // Claim a stack frame without actually generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Will not return here.
  int3();
}


void MacroAssembler::Throw(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Throw message: ");
    RecordComment(msg);
  }
#endif

  push(Immediate(Smi::FromInt(reason)));
  if (!has_frame_) {
    // Claim a stack frame without actually generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kHiddenThrowMessage, 1);
  } else {
    CallRuntime(Runtime::kHiddenThrowMessage, 1);
  }
  // Will not return here.
  int3();
}
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
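// Probe the number-string cache: smis are hashed by value, heap numbers by
// xoring the two halves of their bit pattern. A hit loads the cached string
// into |result|; a miss falls through to |not_found|.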
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shr(mask, kSmiTagSize + 1);  // Untag the length and divide it by two.
  sub(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value for smis
  // is the smi value itself; for doubles it is the xor of the upper and
  // lower words.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfNotSmi(object, &not_smi, Label::kNear);
  mov(scratch, object);
  SmiUntag(scratch);
  jmp(&smi_hash_calculated, Label::kNear);

  bind(&not_smi);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, not_found);
  mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  // Object is a heap number; the hash is in scratch. Compute the cache
  // index.
  and_(scratch, mask);
  Register index = scratch;
  Register probe = mask;
  mov(probe, FieldOperand(number_string_cache, index,
                          times_twice_pointer_size,
                          FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope fscope(this, SSE2);
    movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
    ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  } else {
    fld_d(FieldOperand(object, HeapNumber::kValueOffset));
    fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
    FCmp();
  }
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache, Label::kNear);

  bind(&smi_hash_calculated);
  // Object is a smi; the hash is in scratch. Compute the cache index.
  and_(scratch, mask);
  // Check if the entry is the smi we are looking for.
  cmp(object, FieldOperand(number_string_cache, index,
                           times_twice_pointer_size,
                           FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  mov(result, FieldOperand(number_string_cache, index,
                           times_twice_pointer_size,
                           FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load the instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings; interleave bits from both
  // instance types and compare them in one check.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);
  bind(&succeed);
}
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to come in untagged; tag it to compare with the
  // string length without using a temp register. It is restored at the end.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiUntag(index);
}
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap so the assembler can generate
  // |size| bytes of instructions without failing buffer-size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies on maps
  // never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
    j(not_zero, if_deprecated);
  }
}
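// The incremental marker encodes an object's color in two bits of its page's
// marking bitmap; GetMarkBits computes the bitmap cell and mask, and the
// helpers below test for the black ("10") and white ("00") bit patterns.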
3373 void MacroAssembler::JumpIfBlack(Register
object,
3377 Label::Distance on_black_near) {
3378 HasColor(
object, scratch0, scratch1,
3379 on_black, on_black_near,
3381 ASSERT(strcmp(Marking::kBlackBitPattern,
"10") == 0);
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The mask overflowed: the second mark bit is the low bit of the next
  // bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}

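// --------------------------------------------------------------------------
// Note (not part of the original source): an object's color lives in two
// consecutive mark bits -- white is 00, black is 10, grey is 11, and 01
// cannot occur (see the ASSERTs in EnsureNotWhite below).  HasColor therefore
// tests the first bit, doubles the mask to reach the second bit, and tests
// again; the word_boundary path handles a bit pair that straddles two bitmap
// cells.
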
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}

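// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source).  GetMarkBits maps
// an address to (bitmap cell, bit mask): one mark bit per pointer-sized
// word, 32 bits per cell.  The same index arithmetic in plain C++ (names and
// constants are illustrative, for a 32-bit heap):
#include <stdint.h>
namespace markbit_sketch {
const int kPointerSizeLog2 = 2;  // 4-byte words
const int kBitsPerCellLog2 = 5;  // 32 mark bits per cell
inline void MarkBitFor(uintptr_t addr, uintptr_t page_alignment_mask,
                       uintptr_t* cell_index, uint32_t* mask) {
  uintptr_t word = (addr & page_alignment_mask) >> kPointerSizeLog2;
  *cell_index = word >> kBitsPerCellLog2;  // which cell in the bitmap
  *mask = uint32_t(1) << (word & ((1u << kBitsPerCellLog2) - 1));
}
}  // namespace markbit_sketch
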
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.  If it's a string and it's not a cons string then it's
  // an object containing no GC pointers.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2.  If we multiply the string length as smi by this, it still won't
  // overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}

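// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source).  The sequential
// string branch above computes the character payload without branching on
// the encoding: `length` becomes 4 (one-byte) or 8 (two-byte), i.e. the
// character size shifted left by 2, so multiplying by the smi-tagged length
// and shifting right by 2 + kSmiTagSize yields the byte count.  Worked in
// plain C++ (the function name is illustrative):
#include <stdint.h>
namespace string_size_sketch {
const int kSmiTagSize = 1;
inline uint32_t CharPayloadBytes(uint32_t length, bool one_byte) {
  uint32_t scaled_char_size = one_byte ? 4 : 8;  // char size << 2
  uint32_t smi_length = length << kSmiTagSize;   // length as a smi
  // Mirrors the imul/shr pair above: e.g. CharPayloadBytes(3, false) == 6.
  return (smi_length * scaled_char_size) >> (2 + kSmiTagSize);
}
}  // namespace string_size_sketch
// The code then adds SeqString::kHeaderSize and rounds up to the object
// alignment to get the full object size.
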
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}

void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);
  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);
  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);
  // Check that there are no elements; ecx holds the current JS object in the
  // prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);
  // Second chance: the object may use the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}

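// --------------------------------------------------------------------------
// Note (not part of the original source): kInvalidEnumCacheSentinel in the
// receiver's EnumLength bits means no enum cache has been computed, so the
// fast for-in path bails out to the runtime.  For the fast path to be valid,
// every other object on the prototype chain must report an enum length of
// zero and own no elements (or only the canonical empty slow dictionary).
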
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}

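// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source).  The memento test
// only dereferences the word behind the array when the computed address lies
// inside the used portion of new-space; the two conditional jumps above are
// this range check (names are illustrative):
#include <stdint.h>
namespace memento_sketch {
inline bool MayHaveMemento(uintptr_t memento_end, uintptr_t new_space_start,
                           uintptr_t new_space_top) {
  // j(less, ...) and j(greater, ...) above reject everything outside
  // [new_space_start, new_space_top].
  return memento_end >= new_space_start && memento_end <= new_space_top;
}
}  // namespace memento_sketch
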
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  ASSERT(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  and_(scratch1, Map::kElementsKindMask);
  shr(scratch1, Map::kElementsKindShift);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}

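// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source).  The and_/shr pair
// above is plain bitfield extraction of the elements kind from the map's
// second bitfield byte; the generic pattern in C++ (names are illustrative):
#include <stdint.h>
namespace bitfield_sketch {
inline uint32_t ExtractField(uint32_t word, uint32_t mask, int shift) {
  return (word & mask) >> shift;  // isolate the field, then right-align it
}
}  // namespace bitfield_sketch
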
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  ASSERT(!dividend.is(eax));
  ASSERT(!dividend.is(edx));
  MultiplierAndShift ms(divisor);
  mov(eax, Immediate(ms.multiplier()));
  imul(dividend);
  if (divisor > 0 && ms.multiplier() < 0) add(edx, dividend);
  if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend);
  if (ms.shift() > 0) sar(edx, ms.shift());
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}

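// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source).  TruncatingDiv is
// classic "magic number" division (Granlund/Montgomery): take the high half
// of dividend * multiplier, apply a sign correction, shift, and add the sign
// bit to round toward zero.  A standalone model for divisor 7 (the
// multiplier and shift come from the same math MultiplierAndShift
// encapsulates):
#include <stdint.h>
namespace magic_div_sketch {
inline int32_t DivideBy7(int32_t dividend) {
  const int32_t kMultiplier = static_cast<int32_t>(0x92492493u);  // for 7
  const int kShift = 2;
  int64_t product = static_cast<int64_t>(dividend) * kMultiplier;
  int32_t hi = static_cast<int32_t>(product >> 32);  // edx after imul
  hi += dividend;  // divisor > 0 && multiplier < 0 correction (add above)
  hi >>= kShift;                                     // sar(edx, shift)
  hi += static_cast<uint32_t>(dividend) >> 31;       // shr(eax, 31); add
  return hi;  // e.g. DivideBy7(13) == 1, DivideBy7(-7) == -1
}
}  // namespace magic_div_sketch
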
3655 #endif // V8_TARGET_ARCH_IA32