v8  3.11.10(node0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
assembler-arm-inl.h
Go to the documentation of this file.
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32 
// The original source code covered by the above license has been modified
34 // significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36 
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39 
40 #include "arm/assembler-arm.h"
41 
42 #include "cpu.h"
43 #include "debug.h"
44 
45 
46 namespace v8 {
47 namespace internal {
48 
49 
51  ASSERT(!reg.is(kDoubleRegZero));
53  return reg.code();
54 }
55 
56 
57 void RelocInfo::apply(intptr_t delta) {
58  if (RelocInfo::IsInternalReference(rmode_)) {
59  // absolute code pointer inside code object moves with the code object.
60  int32_t* p = reinterpret_cast<int32_t*>(pc_);
61  *p += delta; // relocate entry
62  }
63  // We do not use pc relative addressing on ARM, so there is
64  // nothing else to do.
65 }
66 
67 
// Returns the target address of a code target or runtime entry, read from
// the constant pool word associated with this reloc entry.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}
72 
73 
// Returns the address of the constant pool slot that holds the target —
// i.e. the location a patcher or the GC must rewrite, not the target itself.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_address_address_at(pc_));
}
80 
81 
// A target occupies exactly one pointer-sized constant pool word on ARM.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
85 
86 
87 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
88  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
90  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
91  Object* target_code = Code::GetCodeFromTargetAddress(target);
92  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
93  host(), this, HeapObject::cast(target_code));
94  }
95 }
96 
97 
// Returns the embedded object (or code target) stored in this entry's
// constant pool word.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(Assembler::target_address_address_at(pc_));
}
102 
103 
// Returns the target object as a Handle. The `origin` assembler is unused on
// ARM: the object pointer is read straight out of the constant pool.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_Handle_at(Assembler::target_address_address_at(pc_));
}
108 
109 
// Returns the constant pool slot holding the object pointer, usable as a
// heap slot (Object**) by the GC.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(Assembler::target_address_address_at(pc_));
}
114 
115 
116 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
117  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
118  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
119  if (mode == UPDATE_WRITE_BARRIER &&
120  host() != NULL &&
121  target->IsHeapObject()) {
122  host()->GetHeap()->incremental_marking()->RecordWrite(
123  host(), &Memory::Object_at(pc_), HeapObject::cast(target));
124  }
125 }
126 
127 
// Returns the constant pool slot holding an external (C++) reference.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(Assembler::target_address_address_at(pc_));
}
132 
133 
134 Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
135  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
136  Address address = Memory::Address_at(pc_);
137  return Handle<JSGlobalPropertyCell>(
138  reinterpret_cast<JSGlobalPropertyCell**>(address));
139 }
140 
141 
142 JSGlobalPropertyCell* RelocInfo::target_cell() {
143  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
144  Address address = Memory::Address_at(pc_);
147  return reinterpret_cast<JSGlobalPropertyCell*>(object);
148 }
149 
150 
151 void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
152  WriteBarrierMode mode) {
153  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
154  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
155  Memory::Address_at(pc_) = address;
156  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
157  // TODO(1550) We are passing NULL as a slot because cell can never be on
158  // evacuation candidate.
159  host()->GetHeap()->incremental_marking()->RecordWrite(
160  host(), NULL, cell);
161  }
162 }
163 
164 
// Returns the call target stored in a patched return/debug-break sequence.
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence: the target address word sits immediately after the two
  // patched instructions.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}
172 
173 
174 void RelocInfo::set_call_address(Address target) {
175  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
176  (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
177  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
178  if (host() != NULL) {
179  Object* target_code = Code::GetCodeFromTargetAddress(target);
180  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
181  host(), this, HeapObject::cast(target_code));
182  }
183 }
184 
185 
// Reads the object stored in the call target slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
189 
190 
// Overwrites the call target slot with `target`. Note: no write barrier is
// performed here.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
194 
195 
// Returns the slot (two instructions past pc_) that holds the call target;
// only valid within a patched return or debug-break sequence.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
201 
202 
203 bool RelocInfo::IsPatchedReturnSequence() {
204  Instr current_instr = Assembler::instr_at(pc_);
205  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
206 #ifdef USE_BLX
207  // A patched return sequence is:
208  // ldr ip, [pc, #0]
209  // blx ip
210  return ((current_instr & kLdrPCMask) == kLdrPCPattern)
211  && ((next_instr & kBlxRegMask) == kBlxRegPattern);
212 #else
213  // A patched return sequence is:
214  // mov lr, pc
215  // ldr pc, [pc, #-4]
216  return (current_instr == kMovLrPc)
217  && ((next_instr & kLdrPCMask) == kLdrPCPattern);
218 #endif
219 }
220 
221 
// A debug break slot counts as patched once its first instruction is no
// longer the DEBUG_BREAK_NOP marker emitted for an unpatched slot.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
226 
227 
// Dispatches this reloc entry to the matching ObjectVisitor callback based
// on its relocation mode.
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    // Only visit debug targets in sequences that have actually been patched.
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}
251 
252 
// Static-dispatch twin of Visit(ObjectVisitor*): routes this reloc entry to
// the StaticVisitor's callbacks. Note the debug-break condition tests
// has_break_points() first here, unlike the dynamic overload.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
276 
277 
// Immediate operand, optionally carrying relocation info for the value.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;  // no register: this operand is a pure immediate
  imm32_ = immediate;
  rmode_ = rmode;
}
283 
284 
// Immediate operand holding the address of an external (C++) reference.
Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;  // so the serializer can fix it up
}
290 
291 
// Immediate operand holding a Smi; encoded directly, so no reloc info.
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE;
}
297 
298 
// Plain register operand: rm with the identity shift (LSL #0).
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
305 
306 
307 bool Operand::is_reg() const {
308  return rm_.is_valid() &&
309  rs_.is(no_reg) &&
310  shift_op_ == LSL &&
311  shift_imm_ == 0;
312 }
313 
314 
315 void Assembler::CheckBuffer() {
316  if (buffer_space() <= kGap) {
317  GrowBuffer();
318  }
319  if (pc_offset() >= next_buffer_check_) {
320  CheckConstPool(false, true);
321  }
322 }
323 
324 
// Writes one instruction word at the current position and advances pc_.
void Assembler::emit(Instr x) {
  CheckBuffer();  // ensure space (and possibly flush the constant pool) first
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
330 
331 
332 Address Assembler::target_address_address_at(Address pc) {
333  Address target_pc = pc;
334  Instr instr = Memory::int32_at(target_pc);
335  // If we have a bx instruction, the instruction before the bx is
336  // what we need to patch.
337  static const int32_t kBxInstMask = 0x0ffffff0;
338  static const int32_t kBxInstPattern = 0x012fff10;
339  if ((instr & kBxInstMask) == kBxInstPattern) {
340  target_pc -= kInstrSize;
341  instr = Memory::int32_at(target_pc);
342  }
343 
344 #ifdef USE_BLX
345  // If we have a blx instruction, the instruction before it is
346  // what needs to be patched.
347  if ((instr & kBlxRegMask) == kBlxRegPattern) {
348  target_pc -= kInstrSize;
349  instr = Memory::int32_at(target_pc);
350  }
351 #endif
352 
354  int offset = instr & 0xfff; // offset_12 is unsigned
355  if ((instr & (1 << 23)) == 0) offset = -offset; // U bit defines offset sign
356  // Verify that the constant pool comes after the instruction referencing it.
357  ASSERT(offset >= -4);
358  return target_pc + offset + 8;
359 }
360 
361 
363  return Memory::Address_at(target_address_address_at(pc));
364 }
365 
366 
368  Address constant_pool_entry, Address target) {
369  Memory::Address_at(constant_pool_entry) = target;
370 }
371 
372 
374  Address target) {
375  Memory::Address_at(constant_pool_entry) = target;
376 }
377 
378 
380  Memory::Address_at(target_address_address_at(pc)) = target;
381  // Intuitively, we would think it is necessary to flush the instruction cache
382  // after patching a target address in the code as follows:
383  // CPU::FlushICache(pc, sizeof(target));
384  // However, on ARM, no instruction was actually patched by the assignment
385  // above; the target address is not part of an instruction, it is patched in
386  // the constant pool and is read via a data access; the instruction accessing
387  // this address in the constant pool remains unchanged.
388 }
389 
390 } } // namespace v8::internal
391 
392 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_
byte * Address
Definition: globals.h:172
static Object *& Object_at(Address addr)
Definition: v8memory.h:75
const Instr kLdrPCMask
const Instr kLdrPCPattern
static void deserialization_set_special_target_at(Address constant_pool_entry, Address target)
static Handle< Object > & Object_Handle_at(Address addr)
Definition: v8memory.h:79
const Instr kMovLrPc
static HeapObject * cast(Object *obj)
int int32_t
Definition: unicode.cc:47
#define ASSERT(condition)
Definition: checks.h:270
const Instr kBlxRegMask
const int kPointerSize
Definition: globals.h:234
#define kScratchDoubleReg
static Address & Address_at(Address addr)
Definition: v8memory.h:71
static void set_target_address_at(Address pc, Address target)
static int32_t & int32_at(Address addr)
Definition: v8memory.h:51
static void set_external_target_at(Address constant_pool_entry, Address target)
const Register pc
static Code * GetCodeFromTargetAddress(Address address)
Definition: objects-inl.h:3380
void CheckConstPool(bool force_emit, bool require_jump)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
static Address target_address_at(Address pc)
#define kDoubleRegZero
static int ToAllocationIndex(DwVfpRegister reg)
const Instr kBlxRegPattern
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1163
bool is(DwVfpRegister reg) const
static const int kInstrSize
#define RUNTIME_ENTRY(name, nargs, ressize)
const Register no_reg
static bool IsLdrPcImmediateOffset(Instr instr)