#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "x64/assembler-x64.h"

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler

void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}

void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}

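// emitl/emitq store the value in the host's native byte order; on x64 that is
// little-endian, which is exactly what the instruction stream expects for
// immediates and displacements.
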
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}

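// A REX prefix has the layout 0100WRXB. W = 1 selects a 64-bit operand size;
// the R, X, and B bits extend the reg, index, and base/rm fields of the
// following ModR/M and SIB bytes, making r8-r15 (and xmm8-xmm15) reachable.
// For example, emit_rex_64(rax, r8) emits 0x48 | 0 << 2 | 1 == 0x49
// (REX.W + REX.B).
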
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}

void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}

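// For the 32-bit and SSE encodings handled below, a REX prefix with no
// extension bits set (a bare 0x40) has no effect, so the emit_optional_rex_32
// variants skip it entirely when rex_bits is zero, saving one byte on the
// common case of low registers.
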
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}

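// -----------------------------------------------------------------------------
// Implementation of RelocInfo
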
// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointers inside code objects move with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  }
}

Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    return Assembler::target_address_at(pc_);
  } else {
    return Memory::Address_at(pc_);
  }
}

Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}

int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}

void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
      host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
          host(), this, HeapObject::cast(target_code));
    }
  } else {
    Memory::Address_at(pc_) = target;
    CPU::FlushICache(pc_, sizeof(Address));
  }
}

Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}

Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}

Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}

Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}

void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}

JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}

void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // Pass NULL as the slot: the cell can never be on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(host(), NULL, cell);
  }
}

bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //   movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //   movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  return false;
#endif
}

bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}

Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}

void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::call_object() {
  return *call_object_address();
}

void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}

void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}

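// Static-visitor variant of Visit above; used by the GC's static marking
// visitors, which dispatch at compile time instead of through a virtual call.
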
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

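// -----------------------------------------------------------------------------
// Implementation of Operand
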
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}

void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

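// Example: scale = times_4 (2), index = rcx (low bits 1), base = rbp (low
// bits 5) yields the SIB byte (2 << 6) | (1 << 3) | 5 == 0x8D.
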
void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}

} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_