#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "arm/assembler-arm.h"

#include "cpu.h"
#include "debug.h"

namespace v8 {
namespace internal {

void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointers inside code objects move with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
  // We do not use pc relative addressing on ARM, so there is nothing else
  // to do.
}

Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the pc in these cases since this function is used by the
    // serializer, which expects the address to reside within the code object.
    return reinterpret_cast<Address>(pc_);
  } else {
    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return Assembler::target_pointer_address_at(pc_);
  }
}

Address RelocInfo::constant_pool_entry_address() {
  ASSERT(IsInConstantPool());
  if (FLAG_enable_ool_constant_pool) {
    ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc_)));
    return Assembler::target_constant_pool_address_at(pc_,
                                                      host_->constant_pool());
  } else {
    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return Assembler::target_pointer_address_at(pc_);
  }
}

int RelocInfo::target_address_size() {
  return kPointerSize;
}

void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}

Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}

void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Address RelocInfo::target_reference() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}

Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}

Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}

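// Note: what is embedded in the code is the address of the cell's value
// slot, so the generated code can load the value with a single indirection.
// Cell::FromValueAddress() recovers the Cell pointer from that slot address
// by undoing the kValueOffset displacement, and set_target_cell() below
// stores cell->address() + Cell::kValueOffset for the same reason.
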
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // A cell can never be on an evacuation candidate, so pass NULL as the
    // slot.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}

static const int kNoCodeAgeSequenceLength = 3;

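// The code-aging sequence emitted at the start of a function is
// kNoCodeAgeSequenceLength instructions long; once patched for aging, its
// last word holds the entry address of the code-aging stub. That is the
// slot the code_age_stub accessors below read and write, at
// pc_ + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1).
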
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}

Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ + Assembler::kInstrSize *
                         (kNoCodeAgeSequenceLength - 1)));
}

void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ + Assembler::kInstrSize *
                     (kNoCodeAgeSequenceLength - 1)) =
      stub->instruction_start();
}

Address RelocInfo::call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}

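// Layout of a patched return/debug break sequence, for reference:
//   ldr ip, [pc, #0]   @ pc_                 (ARM reads pc as pc_ + 8 here,
//   blx ip             @ pc_ + kInstrSize     so the ldr fetches the word
//   <target address>   @ pc_ + 2*kInstrSize   two instructions below)
// call_address(), set_call_address() and call_object_address() all operate
// on that constant word at pc_ + 2 * Assembler::kInstrSize.
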
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::call_object() {
  return *call_object_address();
}

void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}

void RelocInfo::WipeOut() {
  ASSERT(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}

bool RelocInfo::IsPatchedReturnSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return ((current_instr & kLdrPCMask) == kLdrPCPattern) &&
         ((next_instr & kBlxRegMask) == kBlxRegPattern);
}

bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}

void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}

template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}

Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}

bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}

void Assembler::CheckBuffer() {
  // Grow the buffer if we are running out of space, and periodically check
  // whether the pending constant pool must be emitted.
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

Address Assembler::target_pointer_address_at(Address pc) {
  Instr instr = Memory::int32_at(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
}


Address Assembler::target_constant_pool_address_at(
    Address pc, ConstantPoolArray* constant_pool) {
  ASSERT(constant_pool != NULL);
  ASSERT(IsLdrPpImmediateOffset(Memory::int32_at(pc)));
  Instr instr = Memory::int32_at(pc);
  return reinterpret_cast<Address>(constant_pool) +
      GetLdrRegisterImmediateOffset(instr);
}


Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* instr = Instruction::At(pc);
    Instruction* next_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (next_instr->ImmedMovwMovtValue() << 16) |
        instr->ImmedMovwMovtValue());
  } else if (FLAG_enable_ool_constant_pool) {
    ASSERT(IsLdrPpImmediateOffset(Memory::int32_at(pc)));
    return Memory::Address_at(
        target_constant_pool_address_at(pc, constant_pool));
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    return Memory::Address_at(target_pointer_address_at(pc));
  }
}

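// For example, a target of 0x12345678 materialized via a movw/movt pair is
//   movw ip, #0x5678   @ low half
//   movt ip, #0x1234   @ high half
// and target_address_at() reassembles the 32-bit value from the two 16-bit
// immediates in exactly that order.
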
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}

static Instr EncodeMovwImmediate(uint32_t immediate) {
  ASSERT(immediate < 0x10000);
  return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
}

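// ARM encodes a movw/movt 16-bit immediate as imm4 (instruction bits 19:16)
// and imm12 (bits 11:0). Worked example: for 0xABCD,
// (0xABCD & 0xf000) << 4 = 0xA0000 and 0xABCD & 0xfff = 0xBCD, so the
// encoded immediate field pattern is 0xA0BCD.
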
void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    // Patch the two instructions in place: the low 16 bits of the target go
    // into the movw immediate, the high 16 bits into the movt immediate.
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    uint32_t intermediate = instr_ptr[0];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
    instr_ptr[0] = intermediate;
    intermediate = instr_ptr[1];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate >> 16);
    instr_ptr[1] = intermediate;
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    CPU::FlushICache(pc, 2 * kInstrSize);
  } else if (FLAG_enable_ool_constant_pool) {
    ASSERT(IsLdrPpImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(
        target_constant_pool_address_at(pc, constant_pool)) = target;
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(target_pointer_address_at(pc)) = target;
    // No icache flush is needed here: the ldr instruction itself is
    // unchanged, only the constant pool entry it loads from is rewritten.
  }
}

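// A minimal usage sketch (hypothetical caller, not part of this file):
// redirecting a patched code target would look roughly like
//   Assembler::set_target_address_at(pc, code->constant_pool(), new_target);
// The movw/movt path above flushes the icache itself, while the constant
// pool paths need no flush because no instruction bytes change.
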

} }  // namespace v8::internal

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_