36 #ifndef V8_MIPS_ASSEMBLER_MIPS_H_
37 #define V8_MIPS_ASSEMBLER_MIPS_H_
79 return reg.code() - 2;
89 const char*
const names[] = {
128 #define REGISTER(N, C) \
129 const int kRegister_ ## N ## _Code = C; \
130 const Register N = { C }
213 const char*
const names[] = {
324 #define kRootRegister s6
326 #define kLithiumScratchReg s3
327 #define kLithiumScratchReg2 s4
328 #define kLithiumScratchDouble f30
329 #define kDoubleRegZero f28
365 INLINE(
explicit Operand(
const ExternalReference& f));
366 INLINE(
explicit Operand(
const char* s));
370 INLINE(
explicit Operand(Smi* value));
373 INLINE(
explicit Operand(Register rm));
376 INLINE(
bool is_reg()
const);
383 RelocInfo::Mode rmode_;
419 if (f ==
FPU && !FLAG_enable_fpu)
return false;
420 return (supported_ & (1u << f)) != 0;
428 Isolate* isolate = Isolate::UncheckedCurrent();
429 if (isolate ==
NULL) {
434 unsigned enabled =
static_cast<unsigned>(isolate->enabled_cpu_features());
435 return (enabled & (1u << f)) != 0;
445 unsigned mask = 1u << f;
448 (CpuFeatures::found_by_runtime_probing_ & mask) == 0);
449 isolate_ = Isolate::UncheckedCurrent();
451 if (isolate_ !=
NULL) {
452 old_enabled_ =
static_cast<unsigned>(isolate_->enabled_cpu_features());
453 isolate_->set_enabled_cpu_features(old_enabled_ | mask);
457 ASSERT_EQ(Isolate::UncheckedCurrent(), isolate_);
458 if (isolate_ !=
NULL) {
459 isolate_->set_enabled_cpu_features(old_enabled_);
465 unsigned old_enabled_;
478 CpuFeatures::supported_ |= (1u << f);
484 CpuFeatures::supported_ = old_supported_;
489 static bool CanForce() {
496 const unsigned old_supported_;
501 static bool initialized_;
503 static unsigned supported_;
504 static unsigned found_by_runtime_probing_;
506 DISALLOW_COPY_AND_ASSIGN(CpuFeatures);
510 class Assembler :
public AssemblerBase {
525 Assembler(Isolate* isolate,
void* buffer,
int buffer_size);
678 void nop(
unsigned int type = 0) {
680 Register nop_rt_reg = (type == 0) ? zero_reg : at;
681 sll(zero_reg, nop_rt_reg, type,
true);
776 void break_(uint32_t
code,
bool break_as_stop =
false);
947 void db(uint8_t data);
948 void dd(uint32_t data);
961 inline bool overflow()
const {
return pc_ >= reloc_info_writer.pos() - kGap; }
969 *
reinterpret_cast<Instr*
>(
pc) = instr;
973 *
reinterpret_cast<Instr*
>(buffer_ + pos) = instr;
990 static bool IsNop(
Instr instr,
unsigned int type);
1050 void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
1054 if (no_trampoline_pool_before_ < pc_offset)
1059 trampoline_pool_blocked_nesting_++;
1063 trampoline_pool_blocked_nesting_--;
1067 return trampoline_pool_blocked_nesting_ > 0;
1071 return internal_trampoline_exception_;
1077 return trampoline_emitted_;
1082 ASSERT(!block_buffer_growth_);
1083 block_buffer_growth_ =
true;
1087 ASSERT(block_buffer_growth_);
1088 block_buffer_growth_ =
false;
1092 return block_buffer_growth_;
1105 static const int kBufferCheckInterval = 1*
KB/2;
1112 static const int kGap = 32;
1119 static const int kCheckConstIntervalInst = 32;
1120 static const int kCheckConstInterval = kCheckConstIntervalInst *
kInstrSize;
1122 int next_buffer_check_;
1125 int trampoline_pool_blocked_nesting_;
1126 int no_trampoline_pool_before_;
1129 int last_trampoline_pool_end_;
1132 bool block_buffer_growth_;
1136 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize;
1137 RelocInfoWriter reloc_info_writer;
1140 int last_bound_pos_;
1143 inline void CheckBuffer();
1145 inline void emit(
Instr x);
1146 inline void CheckTrampolinePoolQuick();
1156 void GenInstrRegister(
Opcode opcode,
1163 void GenInstrRegister(
Opcode opcode,
1170 void GenInstrRegister(
Opcode opcode,
1177 void GenInstrRegister(
Opcode opcode,
1184 void GenInstrRegister(
Opcode opcode,
1191 void GenInstrImmediate(
Opcode opcode,
1195 void GenInstrImmediate(
Opcode opcode,
1199 void GenInstrImmediate(
Opcode opcode,
1205 void GenInstrJump(
Opcode opcode,
1209 void LoadRegPlusOffsetToAt(
const MemOperand& src);
1212 void print(Label*
L);
1213 void bind_to(Label*
L,
int pos);
1214 void next(Label*
L);
1229 free_slot_count_ = 0;
1232 Trampoline(
int start,
int slot_count) {
1235 free_slot_count_ = slot_count;
1236 end_ = start + slot_count * kTrampolineSlotsSize;
1245 int trampoline_slot = kInvalidSlotPos;
1246 if (free_slot_count_ <= 0) {
1253 trampoline_slot = next_slot_;
1255 next_slot_ += kTrampolineSlotsSize;
1257 return trampoline_slot;
1264 int free_slot_count_;
1268 int unbound_labels_count_;
1274 bool trampoline_emitted_;
1275 static const int kTrampolineSlotsSize = 4 *
kInstrSize;
1276 static const int kMaxBranchOffset = (1 << (18 - 1)) - 1;
1277 static const int kInvalidSlotPos = -1;
1279 Trampoline trampoline_;
1280 bool internal_trampoline_exception_;
1288 bool emit_debug_code_;
1297 assembler->CheckBuffer();
1303 #endif  // V8_MIPS_ASSEMBLER_MIPS_H_
void addu(Register rd, Register rs, Register rt)
static bool IsBranch(Instr instr)
void EndBlockGrowBuffer()
int InstructionsGeneratedSince(Label *label)
static const int kBranchPCOffset
void andi(Register rd, Register rs, int32_t j)
void beq(Register rs, Register rt, int16_t offset)
void cvt_l_d(FPURegister fd, FPURegister fs)
static int GetBranchOffset(Instr instr)
static uint32_t GetRt(Instr instr)
void ClearRecordedAstId()
void trunc_l_d(FPURegister fd, FPURegister fs)
static const int kDebugBreakSlotInstructions
static uint32_t GetOpcodeField(Instr instr)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 instructions(ARM only)") DEFINE_bool(enable_vfp2
void mtc1(Register rt, FPURegister fs)
static bool IsAddImmediate(Instr instr)
TryForceFeatureScope(CpuFeature f)
void bc1t(Label *L, uint16_t cc=0)
static Register GetRsReg(Instr instr)
void round_l_s(FPURegister fd, FPURegister fs)
static int ToAllocationIndex(FPURegister reg)
void swc1(FPURegister fs, const MemOperand &dst)
void round_w_d(FPURegister fd, FPURegister fs)
void bgezal(Register rs, int16_t offset)
void instr_at_put(int pos, Instr instr)
void neg_d(FPURegister fd, FPURegister fs)
void blez(Register rs, int16_t offset)
void sw(Register rd, const MemOperand &rs)
void cvt_s_l(FPURegister fd, FPURegister fs)
void mov_d(FPURegister fd, FPURegister fs)
static TypeFeedbackId None()
void rotr(Register rd, Register rt, uint16_t sa)
static uint32_t GetImmediate16(Instr instr)
void sqrt_d(FPURegister fd, FPURegister fs)
static bool IsSw(Instr instr)
static Instr SetAddImmediateOffset(Instr instr, int16_t offset)
static uint32_t GetFunctionField(Instr instr)
static const int kPatchDebugBreakSlotReturnOffset
int SizeOfCodeGeneratedSince(Label *label)
void tne(Register rs, Register rt, uint16_t code)
void or_(Register dst, int32_t imm32)
void round_w_s(FPURegister fd, FPURegister fs)
const int kInvalidFPUControlRegister
void b(int branch_offset, Condition cond=al)
int32_t buffer_space() const
void floor_l_s(FPURegister fd, FPURegister fs)
static bool IsSupported(CpuFeature f)
static uint32_t GetRsField(Instr instr)
void mul_d(FPURegister fd, FPURegister fs, FPURegister ft)
void clz(Register dst, Register src, Condition cond=al)
void bc1t(int16_t offset, uint16_t cc=0)
void div(Register rs, Register rt)
static uint32_t GetRs(Instr instr)
void j(Condition cc, Label *L, Label::Distance distance=Label::kFar)
static bool IsLwRegFpOffset(Instr instr)
BlockGrowBufferScope(Assembler *assem)
static Register FromAllocationIndex(int index)
bool is(FPURegister creg) const
const uint32_t kMaxStopCode
TypeFeedbackId RecordedAstId()
#define ASSERT(condition)
void swr(Register rd, const MemOperand &rs)
void ext_(Register rt, Register rs, uint16_t pos, uint16_t size)
static const int kPatchReturnSequenceAddressOffset
void SetRecordedAstId(TypeFeedbackId ast_id)
const FPUControlRegister no_fpucreg
static uint32_t GetRdField(Instr instr)
static Instr instr_at(byte *pc)
void DoubleAsTwoUInt32(double d, uint32_t *lo, uint32_t *hi)
static Instr SetSwOffset(Instr instr, int16_t offset)
void sll(Register rd, Register rt, uint16_t sa, bool coming_from_nop=false)
void cvt_d_s(FPURegister fd, FPURegister fs)
static bool IsJalr(Instr instr)
void set_predictable_code_size(bool value)
void floor_w_s(FPURegister fd, FPURegister fs)
static bool IsJ(Instr instr)
void addiu(Register rd, Register rs, int32_t j)
void cvt_d_l(FPURegister fd, FPURegister fs)
static void instr_at_put(byte *pc, Instr instr)
static bool IsLwRegFpNegOffset(Instr instr)
void target_at_put(int pos, int target_pos)
void multu(Register rs, Register rt)
void add_d(FPURegister fd, FPURegister fs, FPURegister ft)
void CheckTrampolinePool()
bool is_buffer_growth_blocked() const
static const int kNumRegisters
void ldc1(FPURegister fd, const MemOperand &src)
void cvt_d_w(FPURegister fd, FPURegister fs)
~BlockTrampolinePoolScope()
const int kNumFPURegisters
void cvt_w_d(FPURegister fd, FPURegister fs)
EnsureSpace(Assembler *assembler)
static void JumpLabelToJumpRegister(Address pc)
static const char * AllocationIndexToString(int index)
void break_(uint32_t code, bool break_as_stop=false)
static bool IsPush(Instr instr)
void ceil_w_s(FPURegister fd, FPURegister fs)
void sh(Register rd, const MemOperand &rs)
static bool IsJr(Instr instr)
DwVfpRegister DoubleRegister
static bool IsOri(Instr instr)
void sra(Register rt, Register rd, uint16_t sa)
void slt(Register rd, Register rs, Register rt)
void swl(Register rd, const MemOperand &rs)
void lwr(Register rd, const MemOperand &rs)
void BlockTrampolinePoolBefore(int pc_offset)
void lbu(Register rd, const MemOperand &rs)
static bool IsJal(Instr instr)
static const int kNumReservedRegisters
FPURegister FloatRegister
void ceil_w_d(FPURegister fd, FPURegister fs)
void trunc_l_s(FPURegister fd, FPURegister fs)
void trunc_w_s(FPURegister fd, FPURegister fs)
void srlv(Register rd, Register rt, Register rs)
static const int kSpecialTargetSize
void div_d(FPURegister fd, FPURegister fs, FPURegister ft)
void abs_d(FPURegister fd, FPURegister fs)
void sltu(Register rd, Register rs, Register rt)
void GetCode(CodeDesc *desc)
void xori(Register rd, Register rs, int32_t j)
void jal_or_jalr(int32_t target, Register rs)
static const int kPcLoadDelta
void teq(Register src1, const Operand &src2, Condition cond=al)
int branch_offset(Label *L, bool jump_elimination_allowed)
int available_space() const
static void set_target_address_at(Address pc, Address target)
static bool IsPop(Instr instr)
static const int kInstructionsFor32BitConstant
void movt(Register reg, uint32_t immediate, Condition cond=al)
static void set_external_target_at(Address instruction_payload, Address target)
void lui(Register rd, int32_t j)
void StartBlockTrampolinePool()
static Register from_code(int code)
static bool IsLw(Instr instr)
static uint32_t GetFunction(Instr instr)
bool is(FPUControlRegister creg) const
void srl(Register rd, Register rt, uint16_t sa)
void set_emit_debug_code(bool value)
static Register GetRdReg(Instr instr)
int32_t pc_offset() const
void bc1f(Label *L, uint16_t cc=0)
void tlt(Register rs, Register rt, uint16_t code)
void slti(Register rd, Register rs, int32_t j)
void srav(Register rt, Register rd, Register rs)
static uint32_t GetRtField(Instr instr)
bool is_trampoline_emitted() const
void sltiu(Register rd, Register rs, int32_t j)
void jalr(Register rs, Register rd=ra)
void floor_l_d(FPURegister fd, FPURegister fs)
void cfc1(Register rt, FPUControlRegister fs)
void StartBlockGrowBuffer()
static const int kCallTargetAddressOffset
void ins_(Register rt, Register rs, uint16_t pos, uint16_t size)
static Register GetRtReg(Instr instr)
void beq(Register rs, Register rt, Label *L)
static const int kDebugBreakSlotLength
void lw(Register rd, const MemOperand &rs)
void ceil_l_d(FPURegister fd, FPURegister fs)
bool has_exception() const
static Register GetRd(Instr instr)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
void RecordDebugBreakSlot()
static uint32_t GetLabelConst(Instr instr)
void ori(Register rd, Register rs, int32_t j)
void stop(const char *msg, Condition cond=al, int32_t code=kDefaultStopCode)
void cvt_s_w(FPURegister fd, FPURegister fs)
void movz(Register rd, Register rs, Register rt)
int ToNumber(Register reg)
void round_l_d(FPURegister fd, FPURegister fs)
bool is(Register reg) const
static Address target_address_at(Address pc)
static const int kNumRegisters
bool is_trampoline_pool_blocked() const
void movf(Register rd, Register rs, uint16_t cc=0)
void RecordComment(const char *msg)
void fcmp(FPURegister src1, const double src2, FPUCondition cond)
static void deserialization_set_special_target_at(Address instruction_payload, Address target)
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
static void QuietNaN(HeapObject *nan)
const FPUControlRegister FCSR
static const char * AllocationIndexToString(int index)
void bltzal(Register rs, int16_t offset)
void cvt_w_s(FPURegister fd, FPURegister fs)
void lwl(Register rd, const MemOperand &rs)
void bne(Register rs, Register rt, int16_t offset)
const FPURegister no_freg
static Address target_address_from_return_address(Address pc)
void xor_(Register dst, int32_t imm32)
void BlockTrampolinePoolFor(int instructions)
friend class PositionsRecorder
static bool IsSwRegFpOffset(Instr instr)
static bool IsJump(Instr instr)
bool OffsetIsInt16Encodable() const
static const int kNumAllocatableRegisters
void mfc1(Register rt, FPURegister fs)
void mult(Register rs, Register rt)
void subu(Register rd, Register rs, Register rt)
static int RelocateInternalReference(byte *pc, intptr_t pc_delta)
void tgeu(Register rs, Register rt, uint16_t code)
Assembler(Isolate *isolate, void *buffer, int buffer_size)
static uint32_t GetSa(Instr instr)
bool MustUseReg(RelocInfo::Mode rmode)
#define ASSERT_EQ(v1, v2)
void trunc_w_d(FPURegister fd, FPURegister fs)
static const int kJSReturnSequenceInstructions
void sllv(Register rd, Register rt, Register rs)
void ctc1(Register rt, FPUControlRegister fs)
void floor_w_d(FPURegister fd, FPURegister fs)
void bne(Register rs, Register rt, Label *L)
void sdc1(FPURegister fs, const MemOperand &dst)
void lh(Register rd, const MemOperand &rs)
static bool IsBne(Instr instr)
void bc1f(int16_t offset, uint16_t cc=0)
PositionsRecorder * positions_recorder()
static bool IsBeq(Instr instr)
static const int kInstrSize
MemOperand(Register rn, int32_t offset=0)
static const int kSizeInBytes
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
void ceil_l_s(FPURegister fd, FPURegister fs)
void tge(Register rs, Register rt, uint16_t code)
void cvt_s_d(FPURegister fd, FPURegister fs)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static int ToAllocationIndex(Register reg)
static const int kNumAllocatableRegisters
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void label_at_put(Label *L, int at_offset)
void bgtz(Register rs, int16_t offset)
void nor(Register rd, Register rs, Register rt)
static int16_t GetLwOffset(Instr instr)
static FPURegister from_code(int code)
static FPURegister FromAllocationIndex(int index)
void nop(unsigned int type=0)
int32_t shifted_branch_offset(Label *L, bool jump_elimination_allowed)
static bool IsSwRegFpNegOffset(Instr instr)
void lb(Register rd, const MemOperand &rs)
Register ToRegister(int num)
uint32_t jump_address(Label *L)
void j_or_jr(int32_t target, Register rs)
void rotrv(Register rd, Register rt, Register rs)
void bgez(Register rs, int16_t offset)
void cvt_l_s(FPURegister fd, FPURegister fs)
void lhu(Register rd, const MemOperand &rs)
static const int kPatchDebugBreakSlotAddressOffset
BlockTrampolinePoolScope(Assembler *assem)
void tltu(Register rs, Register rt, uint16_t code)
static Instr SetLwOffset(Instr instr, int16_t offset)
TypeFeedbackId recorded_ast_id_
bool emit_debug_code() const
static uint32_t GetSaField(Instr instr)
void divu(Register rs, Register rt)
void bltz(Register rs, int16_t offset)
void lwc1(FPURegister fd, const MemOperand &src)
void EndBlockTrampolinePool()
void sub_d(FPURegister fd, FPURegister fs, FPURegister ft)
void mul(Register dst, Register src1, Register src2, SBit s=LeaveCC, Condition cond=al)
static bool IsLui(Instr instr)
static bool IsAndImmediate(Instr instr)
void c(FPUCondition cond, SecondaryField fmt, FPURegister ft, FPURegister fs, uint16_t cc=0)
void movn(Register rd, Register rs, Register rt)
void sb(Register rd, const MemOperand &rs)