#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "x64/assembler-x64.h"

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler

static const byte kCallOpcode = 0xE8;
static const int kNoCodeAgeSequenceLength = 6;
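// (kCallOpcode is the one-byte opcode of the x86-64 near, pc-relative CALL
// instruction; the code-aging and relocation logic below recognizes a patched
// call sequence by testing the byte at pc_ against it.)
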
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}

void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}

void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}

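// The emit helpers above store immediates through Memory's unaligned
// reference casts; x64 is little-endian, so e.g. emitl(0x12345678) lays down
// the bytes 78 56 34 12 at pc_.
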
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}

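// Note that emit_code_target() does not embed a 64-bit Code pointer: it emits
// a 32-bit index into code_targets_, and the handle is recovered later via
// code_target_object_handle_at(). Reusing the index of an identical previous
// target keeps the table small when the same stub is called repeatedly.
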
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  ASSERT(isolate()->code_range()->exists());
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}

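// The REX prefix byte has the layout 0100WRXB: W selects 64-bit operand size,
// R extends the ModRM reg field, X the SIB index field, and B the ModRM rm
// (or SIB base) field. 0x48 below is REX.W, and the shifted high bits of the
// register codes supply R and B. For example, reg = r10 (code 10, high bit
// set) against rm_reg = rcx (code 1) emits 0x48 | 1 << 2 | 0 == 0x4C, i.e.
// REX.W + REX.R.
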
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}

void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}

void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}

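// The emit_optional_rex_32 variants above emit a REX prefix only when at
// least one extension bit (R, X, or B) is actually needed, so operations
// confined to the eight legacy registers stay one byte shorter.
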
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}

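// Call and jump targets are stored as 32-bit displacements relative to the
// end of the instruction: target == pc + 4 + disp32, where pc points at the
// displacement field. set_target_address_at() inverts that relation and
// flushes the instruction cache so the patch becomes visible; the RelocInfo
// code below adjusts the same displacements when code objects move.
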
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}

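// The subtraction in apply() follows from the encoding above: if the code
// object moves by delta while the target address stays fixed, the stored
// displacement target - pc - 4 must change by -delta.
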
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}

Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}

int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}

void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}

Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}

Address RelocInfo::target_reference() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}

void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}

Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}

Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}

void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}

void RelocInfo::WipeOut() {
  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(pc_, host_, pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}

bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, address); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(k); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
         0xCC;
#else
  return false;
#endif
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}

Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1, host_));
}

void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(*pc_ == kCallOpcode);
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + 1, host_,
                                   stub->instruction_start());
}

Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}

void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}

void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}

template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

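// The two Visit() overloads dispatch over the same set of relocation modes;
// the template version lets GC visitors bind their callbacks statically and
// avoid virtual dispatch on this hot path.


// -----------------------------------------------------------------------------
// Implementation of Operand
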
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}

void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

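// SIB byte layout: scale (2 bits), index (3 bits), base (3 bits). For
// example, [rbx + rcx*4] uses scale = 2, index = rcx (code 1), and
// base = rbx (code 3), giving (2 << 6) | (1 << 3) | 3 == 0x8B.
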
void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}

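// A note on how these pieces compose (assuming the Operand constructors
// declared in assembler-x64.h): an operand like [rbp + 8] becomes
// set_modrm(1, rbp) followed by set_disp8(8), while a displacement outside
// int8 range falls back to mod == 2 with set_disp32().
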
} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_