37 #ifndef V8_IA32_ASSEMBLER_IA32_INL_H_
38 #define V8_IA32_ASSEMBLER_IA32_INL_H_
// Adjusts this relocation entry after code has moved by |delta| bytes;
// dispatch is on rmode_. NOTE(review): extraction fragment — the first
// branch and every branch body are missing from this view (original
// lines 51-54, 56-60, 62-66 elided); confirm against the full source.
50 void RelocInfo::apply(intptr_t delta) {
55 }
else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
61 }
else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
67 }
else if (IsInternalReference(rmode_)) {
// Returns the target address this reloc entry points at.
// NOTE(review): body missing from this extraction — presumably delegates
// to Assembler::target_address_at(pc_); confirm against the full source.
76 Address RelocInfo::target_address() {
// Returns the address of the cell that holds the target, which on ia32 is
// pc_ itself (the 32-bit operand is stored in the instruction stream).
// NOTE(review): the opening of the ASSERT (original line 83) is missing
// from this extraction; the visible tail checks rmode_ membership.
82 Address RelocInfo::target_address_address() {
84 || rmode_ == EMBEDDED_OBJECT
85 || rmode_ == EXTERNAL_REFERENCE);
86 return reinterpret_cast<Address>(pc_);
// Size in bytes of the target-address field.
// NOTE(review): body missing from this extraction; on ia32 this is
// presumably kSpecialTargetSize — confirm against the full source.
90 int RelocInfo::target_address_size() {
// NOTE(review): orphan fragment — incremental-marking write barrier for a
// pointer written into code. The enclosing function's signature (original
// lines before 100, presumably set_target_address) is not visible here.
100 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
// Returns the embedded object this entry refers to. Only valid for code
// targets and embedded-object reloc modes (enforced by the ASSERT).
// NOTE(review): the return statement (original lines after 107) is
// missing from this extraction.
106 Object* RelocInfo::target_object() {
107 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
// Handle-returning variant of target_object(); |origin| is presumably used
// to decode code targets — TODO confirm, body missing from this extraction.
112 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
113 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
// Returns the slot in the instruction stream holding the object pointer.
// NOTE(review): the return statement is missing from this extraction.
118 Object** RelocInfo::target_object_address() {
119 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
// NOTE(review): fragment — by original line numbering (124-136) this is the
// interior of RelocInfo::set_target_object; its signature and store are not
// visible. Visible tail: when the new target is a heap object, notify the
// incremental marker via the RecordWrite write barrier.
125 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
130 target->IsHeapObject()) {
131 host()->GetHeap()->incremental_marking()->RecordWrite(
// For EXTERNAL_REFERENCE entries, the reference is stored in-line at pc_;
// return pc_ reinterpreted as the slot holding it.
137 Address* RelocInfo::target_reference_address() {
138 ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
139 return reinterpret_cast<Address*
>(pc_);
// Returns a Handle to the JSGlobalPropertyCell referenced by this entry.
// NOTE(review): original line 145 (presumably the local |address| holding
// Memory::Address_at(pc_)) is missing from this extraction.
143 Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
144 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
146 return Handle<JSGlobalPropertyCell>(
147 reinterpret_cast<JSGlobalPropertyCell**
>(address));
// Returns the raw JSGlobalPropertyCell* referenced by this entry.
// NOTE(review): original lines 153-155 (presumably loading |object| from
// the instruction stream) are missing from this extraction.
151 JSGlobalPropertyCell* RelocInfo::target_cell() {
152 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
156 return reinterpret_cast<JSGlobalPropertyCell*
>(object);
// Rewrites this entry to point at |cell|, ending with the incremental-
// marking write barrier. NOTE(review): the second parameter (original line
// 161), the store itself, and the barrier's arguments are all missing from
// this extraction.
160 void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
162 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
169 host()->GetHeap()->incremental_marking()->RecordWrite(
// Returns the call target of a patched JS-return / debug-break-slot
// sequence; only valid once the sequence has actually been patched.
// NOTE(review): the return statement is missing from this extraction.
175 Address RelocInfo::call_address() {
176 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
177 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
// Sets the call target of a patched JS-return / debug-break-slot sequence,
// then (when this RelocInfo has a host Code object) records the write for
// incremental marking. NOTE(review): the store itself (original line 185)
// and the barrier arguments are missing from this extraction.
182 void RelocInfo::set_call_address(
Address target) {
183 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
184 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
186 if (host() !=
NULL) {
188 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
// Reads the object stored in the patched call sequence's operand slot
// (validity checks live in call_object_address()).
194 Object* RelocInfo::call_object() {
195 return *call_object_address();
// Stores |target| into the patched call sequence's operand slot. Note: no
// write barrier here, unlike set_call_address — intentional per the visible
// code; confirm against the full source before relying on it.
199 void RelocInfo::set_call_object(
Object* target) {
200 *call_object_address() = target;
// Address of the object operand inside the patched call sequence: pc_ + 1
// skips the one-byte call opcode to reach its 32-bit operand.
204 Object** RelocInfo::call_object_address() {
205 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
206 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
207 return reinterpret_cast<Object**
>(pc_ + 1);
// True when the JS return sequence at pc_ has been patched with a debug
// break. NOTE(review): body missing from this extraction — presumably an
// opcode check at pc_; confirm against the full source.
211 bool RelocInfo::IsPatchedReturnSequence() {
// True when the debug break slot at pc_ has been patched (i.e. is no longer
// the original NOP filler). NOTE(review): body missing from this extraction.
216 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
// Dynamic-visitor dispatch: routes this reloc entry to the matching
// ObjectVisitor callback based on its mode. The debugger branch only fires
// for patched return / debug-break-slot sequences while break points are
// active. NOTE(review): several lines are missing from this extraction
// (original 225, 232, 234, 241-242, and the trailing runtime-entry guard
// around 243) — the final branch presumably falls through to
// VisitRuntimeEntry; confirm against the full source.
221 void RelocInfo::Visit(ObjectVisitor* visitor) {
222 RelocInfo::Mode mode = rmode();
223 if (mode == RelocInfo::EMBEDDED_OBJECT) {
224 visitor->VisitEmbeddedPointer(
this);
226 }
else if (RelocInfo::IsCodeTarget(mode)) {
227 visitor->VisitCodeTarget(
this);
228 }
else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
229 visitor->VisitGlobalPropertyCell(
this);
230 }
else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
231 visitor->VisitExternalReference(
this);
233 #ifdef ENABLE_DEBUGGER_SUPPORT
235 }
else if (((RelocInfo::IsJSReturn(mode) &&
236 IsPatchedReturnSequence()) ||
237 (RelocInfo::IsDebugBreakSlot(mode) &&
238 IsPatchedDebugBreakSlotSequence())) &&
239 Isolate::Current()->debug()->has_break_points()) {
240 visitor->VisitDebugTarget(
this);
243 visitor->VisitRuntimeEntry(
this);
// Static (template) counterpart of Visit(ObjectVisitor*): same mode
// dispatch, but calls StaticVisitor's static methods with an explicit
// |heap|, and checks break points via heap->isolate() instead of
// Isolate::Current(). NOTE(review): several lines are missing from this
// extraction (original 253, 260, 268-269 and the closing lines); the final
// branch presumably falls through to VisitRuntimeEntry — confirm against
// the full source.
248 template<
typename StaticVisitor>
249 void RelocInfo::Visit(Heap* heap) {
250 RelocInfo::Mode mode = rmode();
251 if (mode == RelocInfo::EMBEDDED_OBJECT) {
252 StaticVisitor::VisitEmbeddedPointer(heap,
this);
254 }
else if (RelocInfo::IsCodeTarget(mode)) {
255 StaticVisitor::VisitCodeTarget(heap,
this);
256 }
else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
257 StaticVisitor::VisitGlobalPropertyCell(heap,
this);
258 }
else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
259 StaticVisitor::VisitExternalReference(
this);
261 #ifdef ENABLE_DEBUGGER_SUPPORT
262 }
else if (heap->isolate()->debug()->has_break_points() &&
263 ((RelocInfo::IsJSReturn(mode) &&
264 IsPatchedReturnSequence()) ||
265 (RelocInfo::IsDebugBreakSlot(mode) &&
266 IsPatchedDebugBreakSlotSequence()))) {
267 StaticVisitor::VisitDebugTarget(heap,
this);
270 StaticVisitor::VisitRuntimeEntry(
this);
// Immediate from a plain 32-bit integer. NOTE(review): body missing from
// this extraction — presumably stores x into x_ with rmode_ = NONE.
276 Immediate::Immediate(
int x) {
// Immediate from an external (C++) reference: stores the raw address and
// tags it EXTERNAL_REFERENCE so the GC/serializer can relocate it.
282 Immediate::Immediate(
const ExternalReference& ext) {
283 x_ =
reinterpret_cast<int32_t>(ext.address());
284 rmode_ = RelocInfo::EXTERNAL_REFERENCE;
// Immediate encoding a code-internal offset: stores the Label pointer
// itself in x_ (resolved later by Assembler::emit) and tags it
// INTERNAL_REFERENCE.
288 Immediate::Immediate(Label* internal_offset) {
289 x_ =
reinterpret_cast<int32_t>(internal_offset);
290 rmode_ = RelocInfo::INTERNAL_REFERENCE;
// Immediate from a Handle: heap objects are referenced via the handle
// location and tagged EMBEDDED_OBJECT (GC may move them); otherwise (Smi)
// the value is embedded directly. NOTE(review): original lines 295-297
// (presumably the GC-safety check and |obj| extraction), the else line, and
// the Smi branch's rmode_ assignment are missing from this extraction.
294 Immediate::Immediate(Handle<Object> handle) {
298 if (obj->IsHeapObject()) {
299 x_ =
reinterpret_cast<intptr_t
>(handle.location());
300 rmode_ = RelocInfo::EMBEDDED_OBJECT;
303 x_ =
reinterpret_cast<intptr_t
>(obj);
// Immediate from a Smi: the tagged value is embedded directly (Smis are
// not heap objects, so no relocation is needed).
309 Immediate::Immediate(Smi* value) {
310 x_ =
reinterpret_cast<intptr_t
>(value);
// Immediate from a raw address. NOTE(review): the rmode_ assignment
// (original line after 316) is missing from this extraction.
315 Immediate::Immediate(
Address addr) {
316 x_ =
reinterpret_cast<int32_t>(addr);
// Writes a raw 32-bit value into the instruction stream at pc_ and
// advances pc_ past it. Caller is responsible for buffer-space checks.
321 void Assembler::emit(uint32_t x) {
322 *
reinterpret_cast<uint32_t*
>(pc_) = x;
323 pc_ +=
sizeof(uint32_t);
// Emits a Handle: heap objects are emitted via their handle location with
// EMBEDDED_OBJECT reloc info; Smis are emitted as raw immediates.
// NOTE(review): original lines 328-330 (presumably the GC-safety check and
// |obj| extraction) and the else line are missing from this extraction.
327 void Assembler::emit(Handle<Object> handle) {
331 if (obj->IsHeapObject()) {
332 emit(reinterpret_cast<intptr_t>(handle.location()),
333 RelocInfo::EMBEDDED_OBJECT);
336 emit(reinterpret_cast<intptr_t>(obj));
// Emits a 32-bit value with relocation info; code targets carrying an AST
// id are upgraded to CODE_TARGET_WITH_ID so the id is recorded alongside.
// NOTE(review): the else-if chain's remaining conditions (original line
// 344) and the final emit of |x| are missing from this extraction.
341 void Assembler::emit(uint32_t x, RelocInfo::Mode rmode,
unsigned id) {
342 if (rmode == RelocInfo::CODE_TARGET &&
id !=
kNoASTId) {
343 RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast<intptr_t>(
id));
345 RecordRelocInfo(rmode);
// Emits an Immediate. INTERNAL_REFERENCE immediates carry a Label pointer
// in x_ (see Immediate(Label*)) and are emitted as code-relative offsets;
// NOTE(review): the non-label path (original lines 355-358, presumably
// RecordRelocInfo + emit of x_) is missing from this extraction.
351 void Assembler::emit(
const Immediate& x) {
352 if (x.rmode_ == RelocInfo::INTERNAL_REFERENCE) {
353 Label* label =
reinterpret_cast<Label*
>(x.x_);
354 emit_code_relative_offset(label);
// Emits a code-relative offset for |label|: bound labels get a concrete
// offset (body elided here, original lines 364-367); unbound labels are
// linked through a CODE_RELATIVE displacement to be patched on bind.
362 void Assembler::emit_code_relative_offset(Label* label) {
363 if (label->is_bound()) {
368 emit_disp(label, Displacement::CODE_RELATIVE);
// Emits a 16-bit immediate (word-sized operand). NOTE(review): original
// lines 374-375 (presumably the no-reloc ASSERT and the narrowing of x.x_
// into |value|) and the pc_ advance are missing from this extraction.
373 void Assembler::emit_w(
const Immediate& x) {
376 reinterpret_cast<uint16_t*
>(pc_)[0] = value;
// NOTE(review): fragment — by original line numbering this is the body of
// Assembler::target_address_at(Address pc) (signature at line 381 not
// visible): decodes a pc-relative 32-bit displacement into an absolute
// target (next-instruction address + stored displacement).
382 return pc +
sizeof(
int32_t) + *reinterpret_cast<int32_t*>(pc);
// NOTE(review): fragment — presumably the store inside
// Assembler::set_target_address_at (signature and |p| declaration not
// visible): encodes |target| as a pc-relative displacement, the inverse of
// the computation in target_address_at.
388 *p = target - (pc +
sizeof(
int32_t));
// Reads the Displacement recorded at label L's link position in the
// instruction stream.
393 Displacement Assembler::disp_at(Label*
L) {
394 return Displacement(long_at(L->pos()));
// Overwrites the Displacement stored at label L's link position.
398 void Assembler::disp_at_put(Label* L, Displacement disp) {
399 long_at_put(L->pos(), disp.data());
// Emits a typed Displacement for label L and links L into the displacement
// chain. NOTE(review): original line 405 (presumably L->link_to(pc_offset()))
// is missing from this extraction.
403 void Assembler::emit_disp(Label* L, Displacement::Type
type) {
404 Displacement disp(L, type);
406 emit(static_cast<int>(disp.data()));
// Emits a one-byte (near) displacement for label L: if L is already linked
// near, encode the backward link offset into a single byte; unlinked case
// and the final byte emission / relinking (original lines 411, 414,
// 416-419) are missing from this extraction.
410 void Assembler::emit_near_disp(Label* L) {
412 if (L->is_near_linked()) {
413 int offset = L->near_link_pos() -
pc_offset();
415 disp =
static_cast<byte>(offset & 0xFF);
// Encodes the ModR/M byte: mod in bits 7-6, r/m register in bits 2-0 (the
// reg field, bits 5-3, is filled in later by the instruction emitter).
// NOTE(review): original line 423 (presumably range ASSERTs) and the len_
// initialization are missing from this extraction.
422 void Operand::set_modrm(
int mod, Register rm) {
424 buf_[0] = mod << 6 | rm.code();
// Encodes the SIB byte into buf_[1]: scale in bits 7-6, index register in
// bits 5-3, base register in bits 2-0. (scale & -4) == 0 restricts scale to
// the 2-bit range 0..3. NOTE(review): original lines 430 and 432-433
// (presumably a len_ ASSERT/update) are missing from this extraction.
429 void Operand::set_sib(
ScaleFactor scale, Register index, Register base) {
431 ASSERT((scale & -4) == 0);
434 buf_[1] = scale << 6 | index.code() << 3 | base.code();
// Appends an 8-bit displacement after the ModR/M (len_ == 1) or SIB
// (len_ == 2) byte, growing the encoded operand by one byte.
439 void Operand::set_disp8(int8_t disp) {
440 ASSERT(len_ == 1 || len_ == 2);
441 *
reinterpret_cast<int8_t*
>(&buf_[len_++]) = disp;
// Appends a 32-bit displacement (with optional relocation mode) after the
// ModR/M/SIB bytes. NOTE(review): the store, len_ update, and rmode_
// recording (original lines after 446) are missing from this extraction.
445 void Operand::set_dispr(
int32_t disp, RelocInfo::Mode rmode) {
446 ASSERT(len_ == 1 || len_ == 2);
// Register-direct operand. NOTE(review): body missing from this extraction
// — presumably set_modrm(3, reg); confirm against the full source.
453 Operand::Operand(Register reg) {
// XMM-register operand: reuses the general-register encoding by wrapping
// the xmm register code in a Register aggregate. NOTE(review): the
// set_modrm call (original lines after 460) is missing from this extraction.
459 Operand::Operand(XMMRegister xmm_reg) {
460 Register reg = { xmm_reg.code() };
// Absolute-address operand: [disp/r32], i.e. a 32-bit displacement with no
// base or index register. NOTE(review): original lines 466-467 (presumably
// the mod=00, r/m=ebp ModR/M encoding) are missing from this extraction.
465 Operand::Operand(
int32_t disp, RelocInfo::Mode rmode) {
468 set_dispr(disp, rmode);
473 #endif // V8_IA32_ASSEMBLER_IA32_INL_H_
Isolate * isolate() const
static Object *& Object_at(Address addr)
static Handle< Object > & Object_Handle_at(Address addr)
static HeapObject * cast(Object *obj)
#define ASSERT(condition)
static const int kSpecialTargetSize
static Address & Address_at(Address addr)
static void set_target_address_at(Address pc, Address target)
static Code * GetCodeFromTargetAddress(Address address)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
static Address target_address_at(Address pc)
static const int kHeaderSize
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static HeapObject * FromAddress(Address address)
#define RUNTIME_ENTRY(name, nargs, ressize)
static void FlushICache(void *start, size_t size)
static const int kValueOffset