#ifndef V8_X64_ASSEMBLER_X64_H_
#define V8_X64_ASSEMBLER_X64_H_
// Predicates testing whether a 64-bit value fits in a 32-bit range.
inline bool is_uint32(int64_t x) {
  static const uint64_t kMaxUInt32 = V8_UINT64_C(0xffffffff);
  return static_cast<uint64_t>(x) <= kMaxUInt32;
}
inline bool is_int32(int64_t x) {
  static const int64_t kMinInt32 = -V8_INT64_C(0x80000000);
  return is_uint32(x - kMinInt32);
}
inline bool uint_is_int32(uint64_t x) {
  static const uint64_t kMaxInt32 = V8_UINT64_C(0x7fffffff);
  return x <= kMaxInt32;
}
inline bool is_uint32(uint64_t x) {
  static const uint64_t kMaxUInt32 = V8_UINT64_C(0xffffffff);
  return x <= kMaxUInt32;
}
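
// Illustrative usage sketch (the Assembler* 'masm', the constant 'value', and
// kScratchRegister are assumptions, not part of this excerpt): a code emitter
// can pick the short immediate encoding when the constant fits in 32 bits and
// fall back to a full 64-bit move otherwise.
//
//   if (is_int32(value)) {
//     masm->addq(rax, Immediate(static_cast<int32_t>(value)));
//   } else {
//     masm->movq(kScratchRegister, value, RelocInfo::NONE);
//     masm->addq(rax, kScratchRegister);
//   }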
// Register (excerpt): mapping between register codes and allocation indices.
static int ToAllocationIndex(Register reg) {
  return kAllocationIndexByRegisterCode[reg.code()];
}
static Register FromAllocationIndex(int index) {
  Register result = { kRegisterCodeByAllocationIndex[index] };
  return result;
}
static const char* AllocationIndexToString(int index) {
  const char* const names[] = { /* ... allocatable register names ... */ };
  return names[index];
}
static const int kAllocationIndexByRegisterCode[kNumRegisters];
// XMMRegister (excerpt): xmm0 is not allocatable, so allocation indices are
// shifted down by one relative to the register code.
static int ToAllocationIndex(XMMRegister reg) {
  return reg.code() - 1;
}
static const char* AllocationIndexToString(int index) {
  const char* const names[] = { /* ... allocatable register names ... */ };
  return names[index];
}
const XMMRegister xmm0 = { 0 };
const XMMRegister xmm1 = { 1 };
const XMMRegister xmm2 = { 2 };
const XMMRegister xmm3 = { 3 };
const XMMRegister xmm4 = { 4 };
const XMMRegister xmm5 = { 5 };
const XMMRegister xmm6 = { 6 };
const XMMRegister xmm7 = { 7 };
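
// Illustrative usage sketch (assumes an Assembler* 'masm' and the SSE2
// emitters movsd/mulsd declared on Assembler): the xmm constants are passed
// directly to the double-precision instruction emitters.
//
//   masm->movsd(xmm1, Operand(rbp, -8));   // load a double from the stack
//   masm->mulsd(xmm1, xmm2);               // xmm1 *= xmm2
//   masm->movsd(Operand(rbp, -8), xmm1);   // store it back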
class Operand BASE_EMBEDDED {
 public:
  // [base + disp/r]
  Operand(Register base, int32_t disp);

  // [base + index*scale + disp/r]
  Operand(Register base,
          Register index,
          ScaleFactor scale,
          int32_t disp);

  // [index*scale + disp/r]
  Operand(Register index,
          ScaleFactor scale,
          int32_t disp);

  // Offset from an existing memory operand; the offset is added to the
  // existing displacement.
  Operand(const Operand& base, int32_t offset);

  // Checks whether either the base or the index register of the operand is
  // the given register.
  bool AddressUsesRegister(Register reg) const;

 private:
  inline void set_modrm(int mod, Register rm);
  inline void set_disp8(int disp);
  inline void set_disp32(int disp);
};
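
// Illustrative usage sketch (assumes an Assembler* 'masm'; times_8 is used
// here as the ScaleFactor for an 8-byte element stride):
//
//   Operand field(rbx, 16);                  // [rbx + 16]
//   Operand element(rbx, rcx, times_8, 0);   // [rbx + rcx*8]
//   masm->movq(rax, field);
//   masm->cmpq(rax, element);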
class CpuFeatures : public AllStatic {
 public:
  // Check whether a feature is supported by the target CPU.
  static bool IsSupported(CpuFeature f) {
    if (f == SSE2 && !FLAG_enable_sse2) return false;
    if (f == SSE3 && !FLAG_enable_sse3) return false;
    if (f == SSE4_1 && !FLAG_enable_sse4_1) return false;
    if (f == CMOV && !FLAG_enable_cmov) return false;
    if (f == RDTSC && !FLAG_enable_rdtsc) return false;
    if (f == SAHF && !FLAG_enable_sahf) return false;
    return (supported_ & (V8_UINT64_C(1) << f)) != 0;
  }

  // Check whether a feature is currently enabled for the current isolate.
  static bool IsEnabled(CpuFeature f) {
    Isolate* isolate = Isolate::UncheckedCurrent();
    if (isolate == NULL) {
      // No isolate available: fall back to the static support check.
      return IsSupported(f);
    }
    uint64_t enabled = isolate->enabled_cpu_features();
    return (enabled & (V8_UINT64_C(1) << f)) != 0;
  }
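
  // Illustrative usage sketch: emitters guard optional instruction
  // selections on IsSupported() and emit a portable fallback otherwise.
  //
  //   if (CpuFeatures::IsSupported(SSE4_1)) {
  //     // emit the SSE4.1 form
  //   } else {
  //     // emit an SSE2-only fallback
  //   }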
  // Enable a specified feature within a scope.
  class Scope BASE_EMBEDDED {
   public:
    explicit Scope(CpuFeature f) {
      uint64_t mask = V8_UINT64_C(1) << f;
      ASSERT(CpuFeatures::IsSupported(f));
      ASSERT(!Serializer::enabled() ||
             (CpuFeatures::found_by_runtime_probing_ & mask) == 0);
      isolate_ = Isolate::UncheckedCurrent();
      old_enabled_ = 0;
      if (isolate_ != NULL) {
        old_enabled_ = isolate_->enabled_cpu_features();
        isolate_->set_enabled_cpu_features(old_enabled_ | mask);
      }
    }
    ~Scope() {
      ASSERT_EQ(Isolate::UncheckedCurrent(), isolate_);
      if (isolate_ != NULL) {
        isolate_->set_enabled_cpu_features(old_enabled_);
      }
    }

   private:
    Isolate* isolate_;
    uint64_t old_enabled_;
  };

 private:
  // Safe defaults include SSE2 and CMOV for X64.
  static const uint64_t kDefaultCpuFeatures = (1 << SSE2 | 1 << CMOV);

  static bool initialized_;
  static uint64_t supported_;
  static uint64_t found_by_runtime_probing_;
};
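
// Illustrative usage sketch (assumes an Assembler* 'masm'; roundsd and the
// rounding-mode constant are only examples of a feature-gated instruction):
//
//   if (CpuFeatures::IsSupported(SSE4_1)) {
//     CpuFeatures::Scope use_sse4(SSE4_1);
//     masm->roundsd(xmm0, xmm1, kRoundDown);
//   }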
class Assembler : public AssemblerBase {
 private:
  // The relocation writer's position is always at least kGap bytes below the
  // end of the generated instructions, so short multi-instruction sequences
  // do not have to check for buffer overflow after every emit.
  static const int kGap = 32;

 public:
  inline static void set_external_target_at(Address instruction_payload,
                                            Address target) {
    *reinterpret_cast<Address*>(instruction_payload) = target;
  }
  void Nop(int bytes = 1);

  void push(Immediate value);
  void push(const Operand& src);
  void pop(const Operand& dst);
  void enter(Immediate size);
  void movl(const Operand& dst, Immediate imm);
  void movl(const Operand& dst, Label* src);

  void movq(const Operand& dst, Immediate value);
  void movq(Register dst, void* ptr, RelocInfo::Mode rmode);
  void movq(Register dst, int64_t value, RelocInfo::Mode rmode);
  void movq(Register dst, const char* s, RelocInfo::Mode rmode);

  void load_rax(void* ptr, RelocInfo::Mode rmode);
  void load_rax(ExternalReference ext);
  // Arithmetic instructions: the 'l' forms operate on 32 bits, the 'q' forms
  // on 64 bits.
  void addl(Register dst, Register src) { arithmetic_op_32(0x03, dst, src); }
  void addl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x0, dst, src); }
  void addl(Register dst, const Operand& src) { arithmetic_op_32(0x03, dst, src); }
  void addl(const Operand& dst, Immediate src) { immediate_arithmetic_op_32(0x0, dst, src); }
  void addl(const Operand& dst, Register src) { arithmetic_op_32(0x01, src, dst); }

  void addq(Register dst, Register src) { arithmetic_op(0x03, dst, src); }
  void addq(Register dst, const Operand& src) { arithmetic_op(0x03, dst, src); }
  void addq(const Operand& dst, Register src) { arithmetic_op(0x01, src, dst); }
  void addq(Register dst, Immediate src) { immediate_arithmetic_op(0x0, dst, src); }
  void addq(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x0, dst, src); }
  void sbbl(Register dst, Register src) { arithmetic_op_32(0x1b, dst, src); }
  void sbbq(Register dst, Register src) { arithmetic_op(0x1b, dst, src); }

  void cmpb(Register dst, Immediate src) { immediate_arithmetic_op_8(0x7, dst, src); }
  void cmpb(Register dst, Register src) { arithmetic_op(0x3A, dst, src); }
  void cmpb(Register dst, const Operand& src) { arithmetic_op(0x3A, dst, src); }
  void cmpb(const Operand& dst, Register src) { arithmetic_op(0x38, src, dst); }
  void cmpb(const Operand& dst, Immediate src) { immediate_arithmetic_op_8(0x7, dst, src); }
  void cmpw(const Operand& dst, Immediate src) { immediate_arithmetic_op_16(0x7, dst, src); }
  void cmpw(Register dst, Immediate src) { immediate_arithmetic_op_16(0x7, dst, src); }
  void cmpw(Register dst, const Operand& src) { arithmetic_op_16(0x3B, dst, src); }
  void cmpw(Register dst, Register src) { arithmetic_op_16(0x3B, dst, src); }
  void cmpw(const Operand& dst, Register src) { arithmetic_op_16(0x39, src, dst); }

  void cmpl(Register dst, Register src) { arithmetic_op_32(0x3B, dst, src); }
  void cmpl(Register dst, const Operand& src) { arithmetic_op_32(0x3B, dst, src); }
  void cmpl(const Operand& dst, Register src) { arithmetic_op_32(0x39, src, dst); }
  void cmpl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x7, dst, src); }
  void cmpl(const Operand& dst, Immediate src) { immediate_arithmetic_op_32(0x7, dst, src); }

  void cmpq(Register dst, Register src) { arithmetic_op(0x3B, dst, src); }
  void cmpq(Register dst, const Operand& src) { arithmetic_op(0x3B, dst, src); }
  void cmpq(const Operand& dst, Register src) { arithmetic_op(0x39, src, dst); }
  void cmpq(Register dst, Immediate src) { immediate_arithmetic_op(0x7, dst, src); }
  void cmpq(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x7, dst, src); }
  void and_(Register dst, Register src) { arithmetic_op(0x23, dst, src); }
  void and_(Register dst, const Operand& src) { arithmetic_op(0x23, dst, src); }
  void and_(const Operand& dst, Register src) { arithmetic_op(0x21, src, dst); }
  void and_(Register dst, Immediate src) { immediate_arithmetic_op(0x4, dst, src); }
  void and_(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x4, dst, src); }
  void andl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x4, dst, src); }
  void andl(Register dst, Register src) { arithmetic_op_32(0x23, dst, src); }
  void andl(Register dst, const Operand& src) { arithmetic_op_32(0x23, dst, src); }
  void andb(Register dst, Immediate src) { immediate_arithmetic_op_8(0x4, dst, src); }

  void decq(const Operand& dst);
  void decl(const Operand& dst);
  void decb(const Operand& dst);

  void incq(const Operand& dst);
  void incl(const Operand& dst);

  void neg(const Operand& dst);
  void not_(const Operand& dst);
  void or_(Register dst, Register src) { arithmetic_op(0x0B, dst, src); }
  void orl(Register dst, Register src) { arithmetic_op_32(0x0B, dst, src); }
  void or_(Register dst, const Operand& src) { arithmetic_op(0x0B, dst, src); }
  void orl(Register dst, const Operand& src) { arithmetic_op_32(0x0B, dst, src); }
  void or_(const Operand& dst, Register src) { arithmetic_op(0x09, src, dst); }
  void or_(Register dst, Immediate src) { immediate_arithmetic_op(0x1, dst, src); }
  void orl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x1, dst, src); }
  void or_(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x1, dst, src); }
  void orl(const Operand& dst, Immediate src) { immediate_arithmetic_op_32(0x1, dst, src); }

  void rcl(Register dst, Immediate imm8) { shift(dst, imm8, 0x2); }
  void rol(Register dst, Immediate imm8) { shift(dst, imm8, 0x0); }
  void rcr(Register dst, Immediate imm8) { shift(dst, imm8, 0x3); }
  void ror(Register dst, Immediate imm8) { shift(dst, imm8, 0x1); }

  // Shifts: the 'l' forms operate on 32 bits.
  void sar(Register dst, Immediate shift_amount) { shift(dst, shift_amount, 0x7); }
  void sarl(Register dst, Immediate shift_amount) { shift_32(dst, shift_amount, 0x7); }
  void shl(Register dst, Immediate shift_amount) { shift(dst, shift_amount, 0x4); }
  void shll(Register dst, Immediate shift_amount) { shift_32(dst, shift_amount, 0x4); }
  void shr(Register dst, Immediate shift_amount) { shift(dst, shift_amount, 0x5); }
  void shrl(Register dst, Immediate shift_amount) { shift_32(dst, shift_amount, 0x5); }

  void store_rax(void* dst, RelocInfo::Mode mode);
  void subq(Register dst, Register src) { arithmetic_op(0x2B, dst, src); }
  void subq(Register dst, const Operand& src) { arithmetic_op(0x2B, dst, src); }
  void subq(const Operand& dst, Register src) { arithmetic_op(0x29, src, dst); }
  void subq(Register dst, Immediate src) { immediate_arithmetic_op(0x5, dst, src); }
  void subq(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x5, dst, src); }
  void subl(Register dst, Register src) { arithmetic_op_32(0x2B, dst, src); }
  void subl(Register dst, const Operand& src) { arithmetic_op_32(0x2B, dst, src); }
  void subl(const Operand& dst, Immediate src) { immediate_arithmetic_op_32(0x5, dst, src); }
  void subl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x5, dst, src); }
  void subb(Register dst, Immediate src) { immediate_arithmetic_op_8(0x5, dst, src); }

  void testb(const Operand& op, Immediate mask);
  void testl(const Operand& op, Immediate mask);
  void xor_(Register dst, Register src) {
    if (dst.code() == src.code()) {
      // Self-xor zeroes the register; the 32-bit form is sufficient because
      // 32-bit writes zero-extend, and it yields a shorter encoding.
      arithmetic_op_32(0x33, dst, src);
    } else {
      arithmetic_op(0x33, dst, src);
    }
  }

  void xorl(Register dst, Register src) { arithmetic_op_32(0x33, dst, src); }
  void xorl(Register dst, const Operand& src) { arithmetic_op_32(0x33, dst, src); }
  void xorl(Register dst, Immediate src) { immediate_arithmetic_op_32(0x6, dst, src); }
  void xorl(const Operand& dst, Immediate src) { immediate_arithmetic_op_32(0x6, dst, src); }
  void xor_(Register dst, const Operand& src) { arithmetic_op(0x33, dst, src); }
  void xor_(const Operand& dst, Register src) { arithmetic_op(0x31, src, dst); }
  void xor_(Register dst, Immediate src) { immediate_arithmetic_op(0x6, dst, src); }
  void xor_(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x6, dst, src); }
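
  // Illustrative usage sketch (assumes an Assembler* 'masm' and a
  // movl(Register, const Operand&) overload not shown in this excerpt): each
  // wrapper above forwards to a shared emitter, passing either the primary
  // opcode (register/memory forms) or the /subcode selecting the operation
  // in the immediate group.
  //
  //   masm->movl(rax, Operand(rbx, 0));    // eax = 32-bit load from [rbx]
  //   masm->addl(rax, Immediate(1));
  //   masm->cmpl(rax, Immediate(100));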
  void ret(int imm16);

  // Label operations: bind() binds an unbound label to the current code
  // position.
  void bind(Label* L);

  // Calls.
  void call(Label* L);
  void call(Handle<Code> target,
            RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
            TypeFeedbackId ast_id = TypeFeedbackId::None());
  void call(const Operand& operand);

  // Jumps.
  void jmp(Label* L, Label::Distance distance = Label::kFar);
  void jmp(const Operand& src);

  // Conditional jump to a label.
  void j(Condition cc, Label* L, Label::Distance distance = Label::kFar);
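
  // Illustrative usage sketch (assumes an Assembler* 'masm'): a label may be
  // the target of a jump before it is bound.
  //
  //   Label done;
  //   masm->cmpl(rax, Immediate(0));
  //   masm->j(equal, &done, Label::kNear);   // forward jump, not yet bound
  //   masm->subl(rax, Immediate(1));
  //   masm->bind(&done);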
  void fld_s(const Operand& adr);
  void fld_d(const Operand& adr);

  void fstp_s(const Operand& adr);
  void fstp_d(const Operand& adr);
  void fstp(int index);

  void fild_s(const Operand& adr);
  void fild_d(const Operand& adr);

  void fist_s(const Operand& adr);

  void fistp_s(const Operand& adr);
  void fistp_d(const Operand& adr);

  void fisub_s(const Operand& adr);

  void faddp(int i = 1);
  void fsubp(int i = 1);
  void fmulp(int i = 1);
  void fdivp(int i = 1);

  void fxch(int i = 1);
  void ffree(int i = 0);
  // Writes a single byte or word of data in the code stream.
  void db(uint8_t data);
  void dd(uint32_t data);

  int pc_offset() const { return static_cast<int>(pc_ - buffer_); }

  // True if less than kGap bytes remain before the relocation info; the
  // buffer must then be grown before emitting more code.
  bool buffer_overflow() const { return pc_ >= reloc_info_writer.pos() - kGap; }

  // Number of bytes still available between generated code and reloc info.
  int available_space() const { return static_cast<int>(reloc_info_writer.pos() - pc_); }
 private:
  uint32_t long_at(int pos) {
    return *reinterpret_cast<uint32_t*>(addr_at(pos));
  }
  void long_at_put(int pos, uint32_t x) {
    *reinterpret_cast<uint32_t*>(addr_at(pos)) = x;
  }
  void emit(byte x) { *pc_++ = x; }
  inline void emitl(uint32_t x);
  inline void emitq(uint64_t x, RelocInfo::Mode rmode);
  inline void emit_code_target(Handle<Code> target,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id = TypeFeedbackId::None());
  void emit(Immediate x) { emitl(x.value_); }
  inline void emit_rex_64(XMMRegister reg, Register rm_reg);
  inline void emit_rex_64(Register reg, XMMRegister rm_reg);
  inline void emit_rex_64(Register reg, Register rm_reg);
  inline void emit_rex_64(Register reg, const Operand& op);
  inline void emit_rex_64(XMMRegister reg, const Operand& op);
  inline void emit_rex_64(Register rm_reg);
  inline void emit_rex_64(const Operand& op);
  void emit_rex_64() { emit(0x48); }

  inline void emit_rex_32(Register reg, Register rm_reg);
  inline void emit_rex_32(Register reg, const Operand& op);
  inline void emit_rex_32(Register rm_reg);
  inline void emit_rex_32(const Operand& op);

  inline void emit_optional_rex_32(Register reg, Register rm_reg);
  inline void emit_optional_rex_32(Register reg, const Operand& op);
  inline void emit_optional_rex_32(XMMRegister reg, XMMRegister base);
  inline void emit_optional_rex_32(XMMRegister reg, Register base);
  inline void emit_optional_rex_32(Register reg, XMMRegister base);
  inline void emit_optional_rex_32(XMMRegister reg, const Operand& op);
  inline void emit_optional_rex_32(Register rm_reg);
  inline void emit_optional_rex_32(const Operand& op);
  // Emit the ModR/M byte and any following SIB and displacement bytes for a
  // memory operand.
  void emit_operand(Register reg, const Operand& adr) {
    emit_operand(reg.low_bits(), adr);
  }
  void emit_operand(int rm, const Operand& adr);

  // Emit a register-to-register ModR/M byte (mod bits set to 11).
  void emit_modrm(Register reg, Register rm_reg) {
    emit(0xC0 | reg.low_bits() << 3 | rm_reg.low_bits());
  }
  void emit_modrm(int code, Register rm_reg) {
    emit(0xC0 | code << 3 | rm_reg.low_bits());
  }

  inline void emit_code_relative_offset(Label* label);
  // Emit machine code for one of the operations add, adc, sub, sbb, and, or,
  // xor, or cmp.  The register/memory forms are parameterized by the primary
  // opcode; the immediate forms by the /subcode field of the ModR/M byte.
  void arithmetic_op_16(byte opcode, Register reg, Register rm_reg);
  void arithmetic_op_16(byte opcode, Register reg, const Operand& rm_reg);
  void arithmetic_op_32(byte opcode, Register reg, Register rm_reg);
  void arithmetic_op_32(byte opcode, Register reg, const Operand& rm_reg);
  void arithmetic_op(byte opcode, Register reg, Register rm_reg);
  void arithmetic_op(byte opcode, Register reg, const Operand& rm_reg);
  void immediate_arithmetic_op(byte subcode, Register dst, Immediate src);
  void immediate_arithmetic_op(byte subcode, const Operand& dst, Immediate src);
  // Operate on a byte in memory or a register.
  void immediate_arithmetic_op_8(byte subcode, Register dst, Immediate src);
  void immediate_arithmetic_op_8(byte subcode, const Operand& dst, Immediate src);
  // Operate on a word in memory or a register.
  void immediate_arithmetic_op_16(byte subcode, Register dst, Immediate src);
  void immediate_arithmetic_op_16(byte subcode, const Operand& dst, Immediate src);
  // Operate on a 32-bit word in memory or a register.
  void immediate_arithmetic_op_32(byte subcode, Register dst, Immediate src);
  void immediate_arithmetic_op_32(byte subcode, const Operand& dst, Immediate src);

  // Emit machine code for a shift operation; 'shift' emits the 64-bit forms,
  // 'shift_32' the 32-bit forms.
  void shift(Register dst, Immediate shift_amount, int subcode);
  void shift_32(Register dst, Immediate shift_amount, int subcode);
  // Shift dst by cl bits.
  void shift(Register dst, int subcode);
  void shift_32(Register dst, int subcode);
  void emit_farith(int b1, int b2, int i);

  void bind_to(Label* L, int pos);

  // Record relocation info for the current pc_.
  void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);

  RelocInfoWriter reloc_info_writer;

  bool emit_debug_code_;
  bool predictable_code_size_;
};


// Ensures that there is enough space in the assembler's buffer before an
// instruction is emitted; the destructor checks that no more than kGap
// bytes were generated for it.
class EnsureSpace BASE_EMBEDDED {
 public:
  explicit EnsureSpace(Assembler* assembler) : assembler_(assembler) {
    if (assembler_->buffer_overflow()) assembler_->GrowBuffer();
    space_before_ = assembler_->available_space();
  }

  ~EnsureSpace() {
    int bytes_generated = space_before_ - assembler_->available_space();
    ASSERT(bytes_generated < assembler_->kGap);
  }

 private:
  Assembler* assembler_;
  int space_before_;
};
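
// Illustrative usage sketch (Assembler::ret is only an example; its body is
// elided): instruction emitters typically start with an EnsureSpace guard so
// the buffer is grown before any bytes are written.
//
//   void Assembler::ret(int imm16) {
//     EnsureSpace ensure_space(this);
//     // ... emit 0xC3, or 0xC2 followed by imm16 ...
//   }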
#endif  // V8_X64_ASSEMBLER_X64_H_