28 #ifndef V8_X64_ASSEMBLER_X64_INL_H_
29 #define V8_X64_ASSEMBLER_X64_INL_H_
// Emits a 32-bit value into the instruction stream and advances pc_.
// NOTE(review): this excerpt is missing interior lines (original line 46 —
// presumably the store of x at pc_, likely via Memory::uint32_at) and the
// closing brace; confirm against the complete file.
45 void Assembler::emitl(uint32_t x) {
47 pc_ +=
sizeof(uint32_t);
// Emits a 64-bit value into the instruction stream, recording reloc info
// for it first, then advances pc_ by 8 bytes.
// NOTE(review): missing interior lines (the store of x and, presumably, a
// guard on whether rmode needs recording) and the closing brace — confirm
// against the complete file.
51 void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
54 RecordRelocInfo(rmode, x);
56 pc_ +=
sizeof(uint64_t);
// Emits a code-target reference: records reloc info (CODE_TARGET_WITH_ID
// when an AST id accompanies a plain CODE_TARGET, otherwise the given
// mode), then reuses the last entry of code_targets_ if it is identical to
// `target`, else appends the handle.
// NOTE(review): fragment — the parameter list is cut off (the ast_id
// parameter declaration is not visible), and the lines emitting the index
// into the instruction stream plus closing braces are missing; confirm
// against the complete file.
66 void Assembler::emit_code_target(Handle<Code> target,
67 RelocInfo::Mode rmode,
69 ASSERT(RelocInfo::IsCodeTarget(rmode));
70 if (rmode == RelocInfo::CODE_TARGET && ast_id !=
kNoASTId) {
71 RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
73 RecordRelocInfo(rmode);
75 int current = code_targets_.length();
76 if (current > 0 && code_targets_.last().is_identical_to(target)) {
80 code_targets_.Add(target);
// emit_rex_64 overloads: emit a mandatory REX prefix with REX.W set
// (0x48), OR-ing in REX.R (bit 2) from the reg operand's high bit and
// REX.B (bit 0) from the rm/base operand (op.rex_ carries the
// Operand-computed R/X/B bits). For XMM registers the high bit is taken
// from (code & 0x8).
// NOTE(review): every overload here is missing its closing brace in this
// excerpt — confirm against the complete file.
86 void Assembler::emit_rex_64(Register reg, Register rm_reg) {
87 emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
91 void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
92 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
96 void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
97 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
101 void Assembler::emit_rex_64(Register reg,
const Operand& op) {
102 emit(0x48 | reg.high_bit() << 2 | op.rex_);
106 void Assembler::emit_rex_64(XMMRegister reg,
const Operand& op) {
107 emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
111 void Assembler::emit_rex_64(Register rm_reg) {
112 ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
113 emit(0x48 | rm_reg.high_bit());
117 void Assembler::emit_rex_64(
const Operand& op) {
118 emit(0x48 | op.rex_);
// emit_rex_32 overloads: emit a mandatory REX prefix WITHOUT REX.W (0x40)
// for 32-bit operations that still need extended-register bits; same
// R/X/B composition as the emit_rex_64 family.
// NOTE(review): closing braces are missing from this excerpt — confirm
// against the complete file.
122 void Assembler::emit_rex_32(Register reg, Register rm_reg) {
123 emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
127 void Assembler::emit_rex_32(Register reg,
const Operand& op) {
128 emit(0x40 | reg.high_bit() << 2 | op.rex_);
132 void Assembler::emit_rex_32(Register rm_reg) {
133 emit(0x40 | rm_reg.high_bit());
137 void Assembler::emit_rex_32(
const Operand& op) {
138 emit(0x40 | op.rex_);
// emit_optional_rex_32 overloads: compute the REX R/X/B bits for the
// operands and emit a 0x40-based REX prefix only when at least one bit is
// set — a plain 0x40 with no bits would be a wasted byte.
// NOTE(review): closing braces are missing from this excerpt — confirm
// against the complete file.
142 void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
143 byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
144 if (rex_bits != 0) emit(0x40 | rex_bits);
148 void Assembler::emit_optional_rex_32(Register reg,
const Operand& op) {
149 byte rex_bits = reg.high_bit() << 2 | op.rex_;
150 if (rex_bits != 0) emit(0x40 | rex_bits);
154 void Assembler::emit_optional_rex_32(XMMRegister reg,
const Operand& op) {
155 byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
156 if (rex_bits != 0) emit(0x40 | rex_bits);
160 void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
161 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
162 if (rex_bits != 0) emit(0x40 | rex_bits);
166 void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
167 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
168 if (rex_bits != 0) emit(0x40 | rex_bits);
172 void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
173 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
174 if (rex_bits != 0) emit(0x40 | rex_bits);
178 void Assembler::emit_optional_rex_32(Register rm_reg) {
179 if (rm_reg.high_bit()) emit(0x41);
183 void Assembler::emit_optional_rex_32(
const Operand& op) {
184 if (op.rex_ != 0) emit(0x40 | op.rex_);
// Relocates the datum this RelocInfo points at by `delta`, dispatching on
// rmode_ (internal reference vs. code target).
// NOTE(review): fragment — both branch bodies and the trailing branches /
// closing brace are missing from this excerpt; confirm against the
// complete file.
206 void RelocInfo::apply(intptr_t delta) {
207 if (IsInternalReference(rmode_)) {
211 }
else if (IsCodeTarget(rmode_)) {
// Returns the target address of this reloc entry.
// NOTE(review): fragment — only the signature, an assert-adjacent gap and
// the code-target branch header are visible; the actual return paths are
// missing from this excerpt. Confirm against the complete file.
218 Address RelocInfo::target_address() {
220 if (IsCodeTarget(rmode_)) {
// Returns the address of the location holding the target (here simply
// pc_, since on x64 the target is stored inline in the instruction
// stream for these modes).
// NOTE(review): the start of the ASSERT on original line 229 is missing
// from this excerpt (only its continuation lines 230-231 are visible),
// as is the closing brace — confirm against the complete file.
228 Address RelocInfo::target_address_address() {
230 || rmode_ == EMBEDDED_OBJECT
231 || rmode_ == EXTERNAL_REFERENCE);
232 return reinterpret_cast<Address>(pc_);
// Returns the byte width of the encoded target (special/compact encoding
// vs. a full pointer).
// NOTE(review): fragment — the return statements are missing, and original
// lines 247/251 visible below appear to belong to a different function
// (presumably set_target_address, which records a code-target write for
// incremental marking); large gaps in this excerpt — confirm against the
// complete file.
236 int RelocInfo::target_address_size() {
237 if (IsCodedSpecially()) {
247 if (IsCodeTarget(rmode_)) {
251 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
// Returns the object this reloc entry references.
// NOTE(review): fragment — the return statement and closing brace are
// missing from this excerpt; confirm against the complete file.
261 Object* RelocInfo::target_object() {
262 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
// Returns a handle to the referenced object: for embedded objects the
// handle stored inline (body missing here), otherwise the assembler's
// code-target handle table entry at pc_.
// NOTE(review): the embedded-object branch body (original lines 270-271)
// and the closing brace are missing from this excerpt — confirm against
// the complete file.
267 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
268 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
269 if (rmode_ == EMBEDDED_OBJECT) {
272 return origin->code_target_object_handle_at(pc_);
// Returns the slot in the instruction stream that holds the object
// pointer (the pointer is stored inline at pc_ on x64).
// NOTE(review): the closing brace is missing and the cast is split across
// lines by the extraction — confirm against the complete file.
277 Object** RelocInfo::target_object_address() {
278 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
279 return reinterpret_cast<Object**
>(pc_);
// Returns the slot at pc_ holding an external reference address.
// NOTE(review): the closing brace is missing and the cast is split across
// lines by the extraction — confirm against the complete file.
283 Address* RelocInfo::target_reference_address() {
284 ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
285 return reinterpret_cast<Address*
>(pc_);
// NOTE(review): residue of a setter (presumably set_target_object — its
// signature on original line ~289 is missing): asserts the mode, and on a
// heap-object store notifies incremental marking via a write-barrier
// record. Most of the body is missing from this excerpt — confirm against
// the complete file.
290 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
295 target->IsHeapObject()) {
296 host()->GetHeap()->incremental_marking()->RecordWrite(
// Returns a handle to the referenced global-property cell.
// NOTE(review): original line 304 (presumably computing `address` from the
// inline value) and the closing lines are missing from this excerpt —
// confirm against the complete file.
302 Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
303 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
305 return Handle<JSGlobalPropertyCell>(
306 reinterpret_cast<JSGlobalPropertyCell**
>(address));
// Returns the referenced global-property cell as a raw pointer.
// NOTE(review): original lines 312-314 (presumably reading `object` from
// the inline value) and the closing brace are missing from this excerpt —
// confirm against the complete file.
310 JSGlobalPropertyCell* RelocInfo::target_cell() {
311 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
315 return reinterpret_cast<JSGlobalPropertyCell*
>(object);
// Stores a new global-property cell target and records a write-barrier
// entry with incremental marking.
// NOTE(review): fragment — the second parameter (original line 320), the
// actual store, and the closing lines are missing from this excerpt —
// confirm against the complete file.
319 void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
321 ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
329 host()->GetHeap()->incremental_marking()->RecordWrite(
// True when the JS return sequence at pc_ has been patched by the
// debugger: with debugger support, byte 10 is checked against 0xCC
// (int3), the unpatched marker.
// NOTE(review): fragment — original lines 336-341 (and the #else/#endif
// of the ENABLE_DEBUGGER_SUPPORT conditional plus closing brace) are
// missing from this excerpt — confirm against the complete file.
335 bool RelocInfo::IsPatchedReturnSequence() {
342 #ifdef ENABLE_DEBUGGER_SUPPORT
343 return pc_[10] != 0xCC;
// True when the debug-break slot at pc_ has been patched.
// NOTE(review): only the signature survives in this excerpt — the entire
// body is missing; confirm against the complete file.
350 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
// Returns the call target embedded in a patched return / debug-break
// sequence.
// NOTE(review): fragment — the return statement and closing brace are
// missing from this excerpt; confirm against the complete file.
355 Address RelocInfo::call_address() {
356 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
357 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
// Patches the call target inside a patched return / debug-break sequence
// and, when this reloc info has a host Code object, records the code
// write with incremental marking.
// NOTE(review): fragment — the actual store (original lines ~366-369) and
// the closing lines are missing from this excerpt; confirm against the
// complete file.
363 void RelocInfo::set_call_address(
Address target) {
364 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
365 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
370 if (host() !=
NULL) {
372 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
// Returns the object stored in the call sequence's object slot.
// NOTE(review): closing brace missing from this excerpt.
378 Object* RelocInfo::call_object() {
379 return *call_object_address();
// Overwrites the call sequence's object slot with `target`.
// NOTE(review): closing brace missing from this excerpt; no write barrier
// is visible here — confirm against the complete file whether one is
// required.
383 void RelocInfo::set_call_object(
Object* target) {
384 *call_object_address() = target;
// Returns the address of the object slot within a patched return /
// debug-break sequence.
// NOTE(review): fragment — the cast's operand (original line 392, likely
// pc_ plus an offset constant) and the closing lines are missing from
// this excerpt; confirm against the complete file.
388 Object** RelocInfo::call_object_address() {
389 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
390 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
391 return reinterpret_cast<Object**
>(
// Dynamic-visitor dispatch: routes this reloc entry to the matching
// ObjectVisitor callback based on rmode(); patched debug targets are only
// visited when the debugger has active break points, and everything else
// falls through to VisitRuntimeEntry.
// NOTE(review): fragment — original lines 407, 409, 416-417 and the
// closing lines (including the #endif pairing of the
// ENABLE_DEBUGGER_SUPPORT conditional and the condition guarding the
// runtime-entry fallback) are missing from this excerpt; confirm against
// the complete file.
396 void RelocInfo::Visit(ObjectVisitor* visitor) {
397 RelocInfo::Mode mode = rmode();
398 if (mode == RelocInfo::EMBEDDED_OBJECT) {
399 visitor->VisitEmbeddedPointer(
this);
401 }
else if (RelocInfo::IsCodeTarget(mode)) {
402 visitor->VisitCodeTarget(
this);
403 }
else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
404 visitor->VisitGlobalPropertyCell(
this);
405 }
else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
406 visitor->VisitExternalReference(
this);
408 #ifdef ENABLE_DEBUGGER_SUPPORT
410 }
else if (((RelocInfo::IsJSReturn(mode) &&
411 IsPatchedReturnSequence()) ||
412 (RelocInfo::IsDebugBreakSlot(mode) &&
413 IsPatchedDebugBreakSlotSequence())) &&
414 Isolate::Current()->debug()->has_break_points()) {
415 visitor->VisitDebugTarget(
this);
418 visitor->VisitRuntimeEntry(
this);
// Static-visitor dispatch: same routing as the ObjectVisitor overload but
// through StaticVisitor's static methods, passing the Heap*. Note the
// debugger condition is checked via heap->isolate() here rather than
// Isolate::Current(), and it is evaluated before the patched-sequence
// checks (order differs from the dynamic overload).
// NOTE(review): fragment — original lines 435, 443-444 and the closing
// lines (the #endif and the runtime-entry guard) are missing from this
// excerpt; confirm against the complete file.
423 template<
typename StaticVisitor>
424 void RelocInfo::Visit(Heap* heap) {
425 RelocInfo::Mode mode = rmode();
426 if (mode == RelocInfo::EMBEDDED_OBJECT) {
427 StaticVisitor::VisitEmbeddedPointer(heap,
this);
429 }
else if (RelocInfo::IsCodeTarget(mode)) {
430 StaticVisitor::VisitCodeTarget(heap,
this);
431 }
else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
432 StaticVisitor::VisitGlobalPropertyCell(heap,
this);
433 }
else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
434 StaticVisitor::VisitExternalReference(
this);
436 #ifdef ENABLE_DEBUGGER_SUPPORT
437 }
else if (heap->isolate()->debug()->has_break_points() &&
438 ((RelocInfo::IsJSReturn(mode) &&
439 IsPatchedReturnSequence()) ||
440 (RelocInfo::IsDebugBreakSlot(mode) &&
441 IsPatchedDebugBreakSlotSequence()))) {
442 StaticVisitor::VisitDebugTarget(heap,
this);
445 StaticVisitor::VisitRuntimeEntry(
this);
// Writes the ModR/M byte into buf_[0]: mod in bits 7-6, rm low bits in
// bits 2-0, and folds the register's extension bit into rex_ (REX.B).
// NOTE(review): interior lines (original 454/456 — presumably asserts and
// the reg field handling, plus len_ bookkeeping) and the closing brace
// are missing from this excerpt; the reg field (bits 5-3) is not set in
// the visible line — confirm against the complete file.
453 void Operand::set_modrm(
int mod, Register rm_reg) {
455 buf_[0] = mod << 6 | rm_reg.low_bits();
457 rex_ |= rm_reg.high_bit();
// Writes the SIB byte into buf_[1]: scale in bits 7-6, index low bits in
// bits 5-3, base low bits in bits 2-0; folds the index extension bit
// into rex_ as REX.X (bit 1) and the base extension bit as REX.B (bit 0).
// NOTE(review): interior lines (original 462-466, presumably asserts on
// len_ and that index is not rsp) and the closing brace / len_ update are
// missing from this excerpt — confirm against the complete file.
461 void Operand::set_sib(
ScaleFactor scale, Register index, Register base) {
467 buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
468 rex_ |= index.high_bit() << 1 | base.high_bit();
// Appends an 8-bit displacement after the ModR/M (and optional SIB) byte:
// len_ of 1 or 2 tells where the displacement goes, then len_ advances by
// one byte.
// NOTE(review): the line storing `disp` through `p` (original line 476)
// and the closing brace are missing from this excerpt, and an
// is_int8(disp) assert would be expected — confirm against the complete
// file.
472 void Operand::set_disp8(
int disp) {
474 ASSERT(len_ == 1 || len_ == 2);
475 int8_t* p =
reinterpret_cast<int8_t*
>(&buf_[len_]);
477 len_ +=
sizeof(int8_t);
// Appends a 32-bit displacement after the ModR/M (and optional SIB) byte.
// NOTE(review): fragment — everything after the assert (the store of
// `disp` into buf_[len_], the len_ += sizeof(int32_t) update, and the
// closing brace) is missing from this excerpt; confirm against the
// complete file.
480 void Operand::set_disp32(
int disp) {
481 ASSERT(len_ == 1 || len_ == 2);
490 #endif // V8_X64_ASSEMBLER_X64_INL_H_
static Object *& Object_at(Address addr)
static Handle< Object > & Object_Handle_at(Address addr)
static HeapObject * cast(Object *obj)
#define ASSERT(condition)
static const int kPatchReturnSequenceAddressOffset
static uint16_t & uint16_at(Address addr)
static const int kRealPatchReturnSequenceAddressOffset
static const int kSpecialTargetSize
static Address & Address_at(Address addr)
static void set_target_address_at(Address pc, Address target)
static int32_t & int32_at(Address addr)
static Code * GetCodeFromTargetAddress(Address address)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
static Address target_address_at(Address pc)
#define ASSERT_EQ(v1, v2)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static HeapObject * FromAddress(Address address)
static uint64_t & uint64_at(Address addr)
static uint32_t & uint32_at(Address addr)
Handle< Object > code_target_object_handle_at(Address pc)
#define RUNTIME_ENTRY(name, nargs, ressize)
static void FlushICache(void *start, size_t size)
static const int kValueOffset