#if defined(V8_TARGET_ARCH_ARM)

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif

#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif

#if defined(USE_THUMB_INTERWORK)
void MacroAssembler::Jump(Register target, Condition cond) {


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  mov(ip, Operand(target, rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
int MacroAssembler::CallSize(Register target, Condition cond) {
  return 2 * kInstrSize;


void MacroAssembler::Call(Register target, Condition cond) {
  BlockConstPoolScope block_const_pool(this);
  ASSERT_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  int size = 2 * kInstrSize;
  intptr_t immediate = reinterpret_cast<intptr_t>(target);
  if (!Operand(immediate, rmode).is_single_instruction(mov_instr)) {
void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond) {
  BlockConstPoolScope block_const_pool(this);
  positions_recorder()->WriteRecordedPositions();
  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
  mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond);
  ASSERT(kCallTargetAddressOffset == kInstrSize);
  ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             unsigned ast_id,
                             Condition cond) {
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          unsigned ast_id,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  Call(reinterpret_cast<Address>(code.location()), rmode, cond);
  ASSERT_EQ(CallSize(code, rmode, ast_id, cond),
            SizeOfCodeGeneratedSince(&start));
void MacroAssembler::Ret(Condition cond) {


void MacroAssembler::Drop(int count, Condition cond) {


void MacroAssembler::Ret(int drop, Condition cond) {
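
// Swap two registers. When no scratch register is available the code below
// uses the classic three-EOR exchange; otherwise it moves through the scratch
// register.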
void MacroAssembler::Swap(Register reg1,
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);

    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);


void MacroAssembler::Call(Label* target) {


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));


void MacroAssembler::Move(Register dst, Register src, Condition cond) {

  CpuFeatures::Scope scope(VFP3);
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_use_constant_pool() &&
      src2.immediate() == 0) {
  } else if (!src2.is_single_instruction() &&
             !src2.must_use_constant_pool() &&
             CpuFeatures::IsSupported(ARMv7) &&
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
    and_(dst, src1, src2, LeaveCC, cond);
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
  } else {
    ubfx(dst, src1, lsb, width, cond);


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
void MacroAssembler::Bfi(Register dst,
  ASSERT(0 <= lsb && lsb < 32);
  ASSERT(0 <= width && width < 32);
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);


void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
  } else {
    bfc(dst, lsb, width, cond);
void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
  if (!CpuFeatures::IsSupported(ARMv7)) {
    ASSERT((satpos >= 0) && (satpos <= 31));
    ASSERT((src.shift_op() == ASR) || (src.shift_op() == LSL));
    int satval = (1 << satpos) - 1;

    if (!(src.is_reg() && dst.is(src.rm()))) {
    tst(dst, Operand(~satval));
    usat(dst, satpos, src, cond);


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand(cell));
    ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    mov(result, Operand(object));
void MacroAssembler::InNewSpace(Register object,
  and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
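
// Write-barrier helpers: RecordWriteField and RecordWrite record the store in
// the remembered set (and inform the incremental marker) when a pointer is
// written into an object, so the collector can later find it.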
void MacroAssembler::RecordWriteField(
  JumpIfSmi(value, &done);

  if (emit_debug_code()) {
    stop("Unaligned cell in write barrier");

                 remembered_set_action,

  if (emit_debug_code()) {
    mov(value, Operand(BitCast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(BitCast<int32_t>(kZapValue + 8)));


void MacroAssembler::RecordWrite(Register object,
  if (emit_debug_code()) {
    Check(eq, "Wrong address or value passed to RecordWrite");

                MemoryChunk::kPointersToHereAreInterestingMask,
  CheckPageFlag(object,
                MemoryChunk::kPointersFromHereAreInterestingMask,

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);

  if (emit_debug_code()) {
    mov(address, Operand(BitCast<int32_t>(kZapValue + 12)));
    mov(value, Operand(BitCast<int32_t>(kZapValue + 16)));


void MacroAssembler::RememberedSetHelper(Register object,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");

  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
  ASSERT(and_then == kReturnAtEnd);

  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(fp_mode);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
void MacroAssembler::PushSafepointRegisters() {


void MacroAssembler::PopSafepointRegisters() {


void MacroAssembler::PushSafepointRegistersAndDoubles() {
  PushSafepointRegisters();
  sub(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * kDoubleSize));
  for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) {
    vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize);
  }


void MacroAssembler::PopSafepointRegistersAndDoubles() {
  for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) {
    vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize);
  }
  add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * kDoubleSize));
  PopSafepointRegisters();
void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src,
  str(src, SafepointRegistersAndDoublesSlot(dst));


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  int doubles_size = DwVfpRegister::kNumAllocatableRegisters * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
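
// Ldrd/Strd emit the ARMv7 ldrd/strd instructions when available and otherwise
// fall back to a pair of single-word loads/stores, taking care when the
// destination of the first load aliases the base register.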
void MacroAssembler::Ldrd(Register dst1, Register dst2,
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    ldrd(dst1, dst2, src, cond);

    src2.set_offset(src2.offset() + 4);
    if (dst1.is(src.rn())) {
      ldr(dst2, src2, cond);
      ldr(dst1, src, cond);
      ldr(dst1, src, cond);
      ldr(dst2, src2, cond);

    if (dst1.is(src.rn())) {
      ldr(dst1, src, cond);
      src2.set_offset(src2.offset() - 4);
      ldr(dst2, src2, cond);


void MacroAssembler::Strd(Register src1, Register src2,
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    strd(src1, src2, dst, cond);

    dst2.set_offset(dst2.offset() + 4);
    str(src1, dst, cond);
    str(src2, dst2, cond);

    dst2.set_offset(dst2.offset() - 4);
    str(src2, dst2, cond);
void MacroAssembler::ClearFPSCRBits(const uint32_t bits_to_clear,
                                    const Register scratch,
  bic(scratch, scratch, Operand(bits_to_clear), LeaveCC, cond);


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
  VFPCompareAndLoadFlags(src1, src2, pc, cond);


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
  VFPCompareAndLoadFlags(src1, src2, pc, cond);


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);


void MacroAssembler::Vmov(const DwVfpRegister dst,
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value(imm);
  if (value.bits == zero.bits) {
  } else if (value.bits == minus_zero.bits) {
    vmov(dst, imm, cond);
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  mov(ip, Operand(Smi::FromInt(type)));
  mov(ip, Operand(CodeObject()));


void MacroAssembler::LeaveFrame(StackFrame::Type type) {


void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  mov(fp, Operand(sp));
  if (emit_debug_code()) {
  mov(ip, Operand(CodeObject()));
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    DwVfpRegister first = d0;
    vstm(db_w, sp, first, last);

  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (frame_alignment > 0) {
    and_(sp, sp, Operand(-frame_alignment));
void MacroAssembler::InitializeNewString(Register string,
                                         Heap::RootListIndex map_index,
  LoadRoot(scratch2, map_index);
  mov(scratch1, Operand(String::kEmptyHashField));


int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_ARM)
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_ARM)
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_ARM)


void MacroAssembler::LeaveExitFrame(bool save_doubles,
                                    Register argument_count) {
    DwVfpRegister first = d0;
    vldm(ia, r3, first, last);

  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  mov(sp, Operand(fp));
  if (argument_count.is_valid()) {


void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) {
  if (use_eabi_hardfloat()) {
void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
    mov(dst, Operand(Smi::FromInt(1)));
    mov(dst, Operand(Smi::FromInt(0)));
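
// InvokePrologue compares the expected and actual argument counts; on a
// mismatch it routes the call through the ArgumentsAdaptorTrampoline builtin
// so the callee sees the frame layout it expects.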
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    bool* definitely_mismatches,
                                    const CallWrapper& call_wrapper,
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        definitely_matches = true;
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));

    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));

    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
      call_wrapper.BeforeCall(CallSize(adaptor));
      SetCallKind(r5, call_kind);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
      SetCallKind(r5, call_kind);
      Jump(adaptor, RelocInfo::CODE_TARGET);
    bind(&regular_invoke);
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                const CallWrapper& call_wrapper,
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(r5, call_kind);
      call_wrapper.AfterCall();
      SetCallKind(r5, call_kind);


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, no_reg,
                 &done, &definitely_mismatches, flag,
                 NullCallWrapper(), call_kind);
  if (!definitely_mismatches) {
      SetCallKind(r5, call_kind);
      SetCallKind(r5, call_kind);


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper,
  Register expected_reg = r2;
  Register code_reg = r3;
                            SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind);


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper,
  LoadHeapObject(r1, function);
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(r3, expected, actual, flag, call_wrapper, call_kind);


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
  IsInstanceJSObjectType(map, scratch, fail);


void MacroAssembler::IsInstanceJSObjectType(Register map,


void MacroAssembler::IsObjectJSStringType(Register object,


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
  ASSERT(AllowThisStubCall(&ces));
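
// Exception handling: PushTryHandler and PopTryHandler maintain the chain of
// StackHandler records whose head is stored at Isolate::kHandlerAddress.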
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
                    StackHandler::IndexField::encode(handler_index) |
                    StackHandler::KindField::encode(kind);
  mov(r5, Operand(CodeObject()));
  mov(r6, Operand(state));
  if (kind == StackHandler::JS_ENTRY) {
    mov(r7, Operand(Smi::FromInt(0)));
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));


void MacroAssembler::PopTryHandler() {
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));


void MacroAssembler::JumpToHandlerEntry() {
  mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));


void MacroAssembler::Throw(Register value) {
  if (!value.is(r0)) {
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  JumpToHandlerEntry();


void MacroAssembler::ThrowUncatchable(Register value) {
  if (!value.is(r0)) {
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  Label fetch_next, check_kind;
  tst(r2, Operand(StackHandler::KindField::kMask));
  JumpToHandlerEntry();
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  Check(ne, "we should not have an empty lexical context");

  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  if (emit_debug_code()) {
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");

  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  if (emit_debug_code()) {
    mov(holder_reg, ip);
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, "JSGlobalProxy::context() should not be null.");

    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");

  int token_offset = Context::kHeaderSize +
  cmp(scratch, Operand(ip));

  bind(&same_contexts);
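
// Mixes the seed from the heap's hash-seed root into the key and then applies
// a sequence of shift/add/XOR steps to compute the number-dictionary hash.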
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  eor(t0, t0, Operand(scratch));
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  eor(t0, t0, Operand(t0, LSR, 12));
  add(t0, t0, Operand(t0, LSL, 2));
  eor(t0, t0, Operand(t0, LSR, 4));
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  eor(t0, t0, Operand(t0, LSR, 16));


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
  GetNumberHash(t0, t1);
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sub(t1, t1, Operand(1));

  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    and_(t2, t2, Operand(t1));

    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));
    cmp(key, Operand(ip));
    if (i != kProbes - 1) {

  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
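
// The AllocateInNewSpace variants bump the new-space allocation top kept in an
// ExternalReference; if the bumped top would exceed the allocation limit they
// branch to gc_required instead.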
void MacroAssembler::AllocateInNewSpace(int object_size,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));

  ASSERT(!result.is(scratch1));
  ASSERT(!result.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
      reinterpret_cast<intptr_t>(new_space_allocation_top.address());
      reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
  ASSERT((limit - top) == kPointerSize);

  Register topaddr = scratch1;
  Register obj_size_reg = scratch2;
  mov(topaddr, Operand(new_space_allocation_top));
  mov(obj_size_reg, Operand(object_size));

    ldm(ia, topaddr, result.bit() | ip.bit());
    if (emit_debug_code()) {
      Check(eq, "Unexpected allocation top");

  add(scratch2, result, Operand(obj_size_reg), SetCC);
  cmp(scratch2, Operand(ip));


void MacroAssembler::AllocateInNewSpace(Register object_size,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));

  ASSERT(!result.is(scratch1));
  ASSERT(!result.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
      reinterpret_cast<intptr_t>(new_space_allocation_top.address());
      reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
  ASSERT((limit - top) == kPointerSize);

  Register topaddr = scratch1;
  mov(topaddr, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldm(ia, topaddr, result.bit() | ip.bit());
    if (emit_debug_code()) {
      Check(eq, "Unexpected allocation top");

  if ((flags & SIZE_IN_WORDS) != 0) {
    add(scratch2, result, Operand(object_size), SetCC);
  cmp(scratch2, Operand(ip));
  if (emit_debug_code()) {
    Check(eq, "Unaligned allocation in new space");

  if ((flags & TAG_OBJECT) != 0) {
void MacroAssembler::UndoAllocationInNewSpace(Register object,
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  mov(scratch, Operand(new_space_allocation_top));
  cmp(object, scratch);
  Check(lt, "Undo allocation of non allocated memory");
  mov(scratch, Operand(new_space_allocation_top));


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Label* gc_required) {
  mov(scratch1, Operand(length, LSL, 1));
  add(scratch1, scratch1,
  AllocateInNewSpace(scratch1,
  InitializeNewString(result,
                      Heap::kStringMapRootIndex,


void MacroAssembler::AllocateAsciiString(Register result,
                                         Label* gc_required) {
  add(scratch1, length,
  AllocateInNewSpace(scratch1,
  InitializeNewString(result,
                      Heap::kAsciiStringMapRootIndex,


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
  InitializeNewString(result,
                      Heap::kConsStringMapRootIndex,


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
  InitializeNewString(result,
                      Heap::kConsAsciiStringMapRootIndex,


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
  InitializeNewString(result,
                      Heap::kSlicedStringMapRootIndex,


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
  InitializeNewString(result,
                      Heap::kSlicedAsciiStringMapRootIndex,


void MacroAssembler::CompareObjectType(Register object,
  CompareInstanceType(map, type_reg, type);


void MacroAssembler::CompareInstanceType(Register map,
  cmp(type_reg, Operand(type));


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  LoadRoot(ip, index);


void MacroAssembler::CheckFastElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));


void MacroAssembler::CheckFastObjectElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));


void MacroAssembler::CheckFastSmiElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register receiver_reg,
                                                 Register elements_reg,
  Label smi_value, maybe_nan, have_double_value, is_nan, done;
  Register mantissa_reg = scratch2;
  Register exponent_reg = scratch3;

  JumpIfSmi(value_reg, &smi_value);
              isolate()->factory()->heap_number_map(),

  ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
  cmp(exponent_reg, scratch1);
  ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));

  bind(&have_double_value);
  add(scratch1, elements_reg,
  str(mantissa_reg, FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize));
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);

  ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
  cmp(mantissa_reg, Operand(0));
  b(eq, &have_double_value);
  uint64_t nan_int64 = BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double());
  mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64)));
  mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32)));
  jmp(&have_double_value);

  add(scratch1, elements_reg,
  add(scratch1, scratch1,
  FloatingPointHelper::Destination destination;
  if (CpuFeatures::IsSupported(VFP3)) {
    destination = FloatingPointHelper::kVFPRegisters;
    destination = FloatingPointHelper::kCoreRegisters;

  Register untagged_value = receiver_reg;
  SmiUntag(untagged_value, value_reg);
  FloatingPointHelper::ConvertIntToDouble(this,
  if (destination == FloatingPointHelper::kVFPRegisters) {
    CpuFeatures::Scope scope(VFP3);
    vstr(d0, scratch1, 0);
    str(exponent_reg, MemOperand(scratch1, Register::kSizeInBytes));
void MacroAssembler::CompareMap(Register obj,
                                Label* early_success,
  CompareMap(scratch, map, early_success, mode);


void MacroAssembler::CompareMap(Register obj_map,
                                Label* early_success,
  cmp(obj_map, Operand(map));
      Map* current_map = *map;
        current_map = current_map->LookupElementsTransitionMap(kind);
        if (!current_map) break;
        b(eq, early_success);
        cmp(obj_map, Operand(Handle<Map>(current_map)));


void MacroAssembler::CheckMap(Register obj,
    JumpIfSmi(obj, fail);
  CompareMap(obj, scratch, map, &success, mode);


void MacroAssembler::CheckMap(Register obj,
                              Heap::RootListIndex index,
    JumpIfSmi(obj, fail);
  LoadRoot(ip, index);


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Code> success,
    JumpIfSmi(obj, &fail);
  mov(ip, Operand(map));
  Jump(success, RelocInfo::CODE_TARGET, eq);


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             bool miss_on_bound_function) {
  JumpIfSmi(function, miss);

  if (miss_on_bound_function) {
        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));

  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);

  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  CompareObjectType(result, scratch, scratch, MAP_TYPE);

  bind(&non_instance);


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  ASSERT(AllowThisStubCall(stub));
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond);


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);


static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
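
// CallApiFunctionAndReturn calls an API callback via the DirectCEntry stub,
// then restores the HandleScope (next, limit, level) fields, checks for a
// scheduled exception, and leaves the exit frame.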
void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(),
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(),

  mov(r7, Operand(next_address));
  add(r6, r6, Operand(1));

  DirectCEntryStub stub;
  stub.GenerateCall(this, function);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  cmp(r0, Operand(0));
  LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);

  if (emit_debug_code()) {
    Check(eq, "Unexpected level after return from api call");
  sub(r6, r6, Operand(1));
  b(ne, &delete_allocated_handles);

  bind(&leave_exit_frame);
  LoadRoot(r4, Heap::kTheHoleValueRootIndex);
  mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
  b(ne, &promote_scheduled_exception);
  mov(r4, Operand(stack_space));
  LeaveExitFrame(false, r4);

  bind(&promote_scheduled_exception);
  TailCallExternalReference(
      ExternalReference(Runtime::kPromoteScheduledException, isolate()),

  bind(&delete_allocated_handles);
  PrepareCallCFunction(1, r5);
  mov(r0, Operand(ExternalReference::isolate_address()));
      ExternalReference::delete_handle_scope_extensions(isolate()), 1);
  jmp(&leave_exit_frame);
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  LoadRoot(r0, Heap::kUndefinedValueRootIndex);


void MacroAssembler::IndexFromHash(Register hash, Register index) {
         (1 << String::kArrayIndexValueBits));
  Ubfx(hash, hash, String::kHashShift, String::kArrayIndexValueBits);


void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
                                                       Register outHighReg,
                                                       Register outLowReg) {
  vcvt_f64_s32(d7, s15);
  vmov(outLowReg, outHighReg, d7);


void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
                                               DwVfpRegister result,
                                               Register heap_number_map,
                                               SwVfpRegister scratch3,
  JumpIfNotSmi(object, &not_smi);
  vmov(scratch3, scratch1);
  vcvt_f64_s32(result, scratch3);
  cmp(scratch1, heap_number_map);
       HeapNumber::kExponentShift,
       HeapNumber::kExponentBits);
  cmp(scratch1, Operand(-1));
  vldr(result, scratch2, HeapNumber::kValueOffset);


void MacroAssembler::SmiToDoubleVFPRegister(Register smi,
                                            DwVfpRegister value,
                                            SwVfpRegister scratch2) {
  vmov(scratch2, scratch1);
  vcvt_f64_s32(value, scratch2);
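
// ConvertToInt32 truncates a heap number to a 32-bit integer: with VFP3 it
// uses vcvt_s32_f64, otherwise it decodes the IEEE-754 exponent and shifts the
// mantissa manually.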
void MacroAssembler::ConvertToInt32(Register source,
                                    DwVfpRegister double_scratch,
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    vldr(double_scratch, scratch, HeapNumber::kValueOffset);
    vcvt_s32_f64(double_scratch.low(), double_scratch);
    vmov(dest, double_scratch.low());
    sub(scratch, dest, Operand(1));
    cmp(scratch, Operand(LONG_MAX - 1));
  } else {
    Label right_exponent, done;
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);
    const uint32_t non_smi_exponent = HeapNumber::kExponentBias + 30;
    int fudge_factor = 0x400;
    sub(scratch2, scratch2, Operand(fudge_factor));
    cmp(scratch2, Operand(non_smi_exponent - fudge_factor));
    b(eq, &right_exponent);
    const uint32_t zero_exponent = HeapNumber::kExponentBias + 0;
    sub(scratch2, scratch2, Operand(zero_exponent - fudge_factor), SetCC);
    rsb(dest, scratch2, Operand(30));
    bind(&right_exponent);
    and_(scratch2, scratch, Operand(HeapNumber::kMantissaMask));
    orr(scratch2, scratch2, Operand(1 << HeapNumber::kExponentShift));
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    mov(scratch2, Operand(scratch2, LSL, shift_distance));
    orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance));
    mov(dest, Operand(scratch, LSR, dest));
                                     SwVfpRegister result,
                                     DwVfpRegister double_input,
  CpuFeatures::Scope scope(VFP3);
  Register prev_fpscr = scratch1;
  Register scratch = scratch2;
  int32_t check_inexact_conversion =
      check_inexact_conversion |
  orr(scratch, scratch, Operand(rounding_mode));
  vcvt_s32_f64(result,
void MacroAssembler::EmitOutOfInt32RangeTruncate(Register result,
                                                 Register input_high,
  Label done, normal_exponent, restore_sign;
       HeapNumber::kExponentShift,
       HeapNumber::kExponentBits);
  cmp(result, Operand(HeapNumber::kExponentMask));
      Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31),
  b(le, &normal_exponent);
  mov(result, Operand(0));

  bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  add(scratch, result, Operand(kShiftBase + HeapNumber::kMantissaBits), SetCC);

  Register sign = result;
      Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  mov(input_high, Operand(input_high, LSL, scratch));

  Label pos_shift, shift_done;
  rsb(scratch, scratch, Operand(32), SetCC);
  rsb(scratch, scratch, Operand(0));
  mov(input_low, Operand(input_low, LSL, scratch));
  mov(input_low, Operand(input_low, LSR, scratch));
  orr(input_high, input_high, Operand(input_low));
  cmp(sign, Operand(0));
  rsb(result, input_high, Operand(0), LeaveCC, ne);


void MacroAssembler::EmitECMATruncate(Register result,
                                      DwVfpRegister double_input,
                                      SwVfpRegister single_scratch,
                                      Register input_high,
                                      Register input_low) {
  CpuFeatures::Scope scope(VFP3);
  ASSERT(!input_high.is(result));
  ASSERT(!input_low.is(result));
  ASSERT(!input_low.is(input_high));
  ASSERT(!scratch.is(result) &&
         !scratch.is(input_high) &&
         !scratch.is(input_low));
  ASSERT(!single_scratch.is(double_input.low()) &&
         !single_scratch.is(double_input.high()));

  vcvt_s32_f64(single_scratch, double_input);
  vmov(result, single_scratch);

  vmov(input_low, input_high, double_input);
  EmitOutOfInt32RangeTruncate(result,


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    and_(dst, dst, Operand((1 << num_least_bits) - 1));


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);

  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  mov(r0, Operand(function->nargs));
  mov(r1, Operand(ExternalReference(function, isolate())));


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
  TailCallExternalReference(ExternalReference(fid, isolate()),


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
  mov(r1, Operand(builtin));
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   const CallWrapper& call_wrapper) {
  GetBuiltinEntry(r2, id);
    call_wrapper.BeforeCall(CallSize(r2));
    call_wrapper.AfterCall();


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
                  JSBuiltinsObject::OffsetOfFunctionWithId(id)));


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  GetBuiltinFunction(r1, id);


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    add(scratch1, scratch1, Operand(value));


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    sub(scratch1, scratch1, Operand(value));


void MacroAssembler::Assert(Condition cond, const char* msg) {
  if (emit_debug_code())


void MacroAssembler::AssertRegisterIsRoot(Register reg,
                                          Heap::RootListIndex index) {
  if (emit_debug_code()) {
    LoadRoot(ip, index);
    Check(eq, "Register did not match expected root");


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    Abort("JSObject with fast elements map has slow elements");


void MacroAssembler::Check(Condition cond, const char* msg) {


void MacroAssembler::Abort(const char* msg) {
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
  RecordComment("Abort message: ");
  mov(r0, Operand(p0));
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
    CallRuntime(Runtime::kAbort, 2);
    CallRuntime(Runtime::kAbort, 2);
  if (is_const_pool_blocked()) {
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    ASSERT(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));


void MacroAssembler::LoadTransitionedArrayMapConditional(
    Register map_in_out,
    Label* no_map_match) {
  ldr(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, scratch);
  b(ne, no_map_match);

  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;


void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
  } else if (can_have_holes) {


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  ldr(function,
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
                            GlobalObject::kGlobalContextOffset));
  ldr(function, MemOperand(function, Context::SlotOffset(index)));


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    Abort("Global functions must have initial map");


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  b(ne, not_power_of_two_or_zero);


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  b(ne, not_power_of_two);


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Label* on_not_both_smi) {
  b(ne, on_not_both_smi);


void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  b(cs, non_smi_case);


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Label* on_either_smi) {
  b(eq, on_either_smi);
void MacroAssembler::AbortIfSmi(Register object) {
  Assert(ne, "Operand is a smi");


void MacroAssembler::AbortIfNotSmi(Register object) {
  Assert(eq, "Operand is not smi");


void MacroAssembler::AbortIfNotString(Register object) {
  Assert(ne, "Operand is not a string");
  Assert(lo, "Operand is not a string");


void MacroAssembler::AbortIfNotRootValue(Register src,
                                         Heap::RootListIndex root_value_index,
                                         const char* message) {
  CompareRoot(src, root_value_index);
  Assert(eq, message);


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Label* on_not_heap_number) {
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);


void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register heap_number_map,
                                        Label* gc_required) {
  AllocateInNewSpace(HeapNumber::kSize,
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  vstr(value, scratch1, HeapNumber::kValueOffset);


void MacroAssembler::CopyFields(Register dst,
  ASSERT((temps & ((1 << 15) - 1)) != 0);
  ASSERT((temps & dst.bit()) == 0);
  ASSERT((temps & src.bit()) == 0);
  for (int i = 0; i < 15; i++) {
    if ((temps & (1 << i)) != 0) {
  for (int i = 0; i < field_count; i++) {


void MacroAssembler::CopyBytes(Register src,
  Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  cmp(length, Operand(0));
  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);

  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, "Expecting alignment for CopyBytes");
  cmp(length, Operand(kPointerSize));
#if CAN_USE_UNALIGNED_ACCESSES
  mov(scratch, Operand(scratch, LSR, 8));
  mov(scratch, Operand(scratch, LSR, 8));
  mov(scratch, Operand(scratch, LSR, 8));
  sub(length, length, Operand(kPointerSize));

  cmp(length, Operand(0));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
  cmp(start_offset, end_offset);
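
// Counts leading zeros. Without the ARMv5 clz instruction the count is
// narrowed by testing 16-, 8-, 4-, 2- and 1-bit halves in turn.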
void MacroAssembler::CountLeadingZeros(Register zeros,
  ASSERT(!zeros.is(source) || !source.is(scratch));
  ASSERT(!zeros.is(scratch));
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
  Move(scratch, source);
  tst(scratch, Operand(0xffff0000));
  add(zeros, zeros, Operand(16), LeaveCC, eq);
  tst(scratch, Operand(0xff000000));
  add(zeros, zeros, Operand(8), LeaveCC, eq);
  tst(scratch, Operand(0xf0000000));
  add(zeros, zeros, Operand(4), LeaveCC, eq);
  tst(scratch, Operand(0xc0000000));
  add(zeros, zeros, Operand(2), LeaveCC, eq);
  tst(scratch, Operand(0x80000000u));
  add(zeros, zeros, Operand(1), LeaveCC, eq);


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
  int kFlatAsciiStringMask =
  and_(scratch1, first, Operand(kFlatAsciiStringMask));
  and_(scratch2, second, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
  int kFlatAsciiStringMask =
  and_(scratch, type, Operand(kFlatAsciiStringMask));
  cmp(scratch, Operand(kFlatAsciiStringTag));


static const int kRegisterPassedArguments = 4;


int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
      stack_passed_words +=
    num_reg_arguments += 2 * num_double_arguments;
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  return stack_passed_words;
void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
  PrepareCallCFunction(num_reg_arguments, 0, scratch);


void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) {
  if (use_eabi_hardfloat()) {


void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1,
  if (use_eabi_hardfloat()) {
    vmov(r0, r1, dreg1);
    vmov(r2, r3, dreg2);


void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg,
  if (use_eabi_hardfloat()) {


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
#if defined(V8_HOST_ARCH_ARM)
  if (emit_debug_code()) {
    int frame_alignment = OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      stop("Unexpected alignment");
      bind(&alignment_as_expected);

  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize)));
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
  const uint32_t kLdrOffsetMask = (1 << 12) - 1;
  if (emit_debug_code()) {
    Check(eq, "The instruction to patch should be a load from pc.");
  and_(result, result, Operand(kLdrOffsetMask));
  add(result, ldr_location, Operand(result));
  add(result, result, Operand(kPCRegOffset));


void MacroAssembler::CheckPageFlag(
    Label* condition_met) {
  and_(scratch, object, Operand(~Page::kPageAlignmentMask));
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
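
// Incremental-marking helpers: each page carries a mark bitmap in which an
// object's colour is encoded in two consecutive bits (white "00", black "10",
// grey "11").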
void MacroAssembler::JumpIfBlack(Register object,
  HasColor(object, scratch0, scratch1, on_black, 1, 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);


void MacroAssembler::JumpIfDataObject(Register value,
                                      Label* not_data_object) {
  Label is_data_object;
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  b(eq, &is_data_object);
  b(ne, not_data_object);
  bind(&is_data_object);


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));


void MacroAssembler::EnsureNotWhite(
    Register bitmap_scratch,
    Register mask_scratch,
    Register load_scratch,
    Label* value_is_white_and_not_data) {
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  if (emit_debug_code()) {
    tst(load_scratch, Operand(mask_scratch, LSL, 1));
    stop("Impossible marking bit pattern");

  Register map = load_scratch;
  Register length = load_scratch;
  Label is_data_object;

  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  mov(length, Operand(HeapNumber::kSize), LeaveCC, eq);
  b(eq, &is_data_object);

  Register instance_type = load_scratch;
  b(ne, value_is_white_and_not_data);
  mov(length, Operand(ExternalString::kSize), LeaveCC, ne);
  b(ne, &is_data_object);

  bind(&is_data_object);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  orr(ip, ip, Operand(mask_scratch));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));

  and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
  add(ip, ip, Operand(length));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
  Vmov(temp_double_reg, 0.0);
  VFPCompareAndSetFlags(input_reg, temp_double_reg);
  mov(result_reg, Operand(0));

  Vmov(temp_double_reg, 255.0);
  VFPCompareAndSetFlags(input_reg, temp_double_reg);
  mov(result_reg, Operand(255));

  Vmov(temp_double_reg, 0.5);
  vadd(temp_double_reg, input_reg, temp_double_reg);
  vcvt_u32_f64(temp_double_reg.low(), temp_double_reg);
  vmov(result_reg, temp_double_reg.low());


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, Operand(FACTORY->empty_descriptor_array()));


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Register empty_descriptor_array_value = r7;
  LoadRoot(empty_descriptor_array_value,
           Heap::kEmptyDescriptorArrayRootIndex);

  cmp(r2, empty_fixed_array_value);
  b(ne, call_runtime);
  JumpIfSmi(r3, call_runtime);
  JumpIfSmi(r3, call_runtime);

  Label check_prototype;
  b(eq, &check_prototype);
  cmp(r3, empty_fixed_array_value);
  b(ne, call_runtime);

  bind(&check_prototype);
  cmp(r1, null_value);


  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid();

  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(NULL, address, size_ + Assembler::kGap) {
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);


CodePatcher::~CodePatcher() {
  CPU::FlushICache(address_, size_);
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);


#endif  // V8_TARGET_ARCH_ARM