#if V8_TARGET_ARCH_ARM64

#define ARM64_DEFINE_FP_STATICS

      // Tail of the opcode switch that classifies loads: LDR_d is the last
      // opcode treated as a plain load; anything else is not a load.
      case LDR_d:
        return true;
      default:
        return false;

      // Tail of the opcode switch that classifies stores: STR_d is the last
      // opcode treated as a plain store; anything else is not a store.
      case STR_d:
        return true;
      default:
        return false;

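// The next two helpers expand the rotate/set-bits fields of a logical
// instruction into the bit pattern they encode; ImmLogical() below uses them.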
static uint64_t RotateRight(uint64_t value,
                            unsigned int rotate,
                            unsigned int width) {
  ASSERT(width <= 64);
  rotate &= 63;
  return ((value & ((1UL << rotate) - 1UL)) << (width - rotate)) |
         (value >> rotate);
}

static uint64_t RepeatBitsAcrossReg(unsigned reg_size,
                                    uint64_t value,
                                    unsigned width) {
  ASSERT((width == 2) || (width == 4) || (width == 8) || (width == 16) ||
         (width == 32));
  ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  uint64_t result = value & ((1UL << width) - 1UL);
  for (unsigned i = width; i < reg_size; i *= 2) {
    result |= (result << i);
  }
  return result;
}

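// Logical immediates cannot encode zero, so ImmLogical() returns 0 to signal
// a failure case. The N, imm_s and imm_r fields select an element size of
// 2, 4, 8, 16, 32 or 64 bits; a pattern with the S + 1 least significant bits
// set is rotated right by R and replicated across the W or X register width.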
uint64_t Instruction::ImmLogical() {
  unsigned reg_size = SixtyFourBits() ? kXRegSizeInBits : kWRegSizeInBits;
  int64_t n = BitN();
  int64_t imm_s = ImmSetBits();
  int64_t imm_r = ImmRotate();

  if (n == 1) {
    // 64-bit pattern: imm_s + 1 low bits set, rotated right by imm_r.
    if (imm_s == 0x3F) return 0;
    uint64_t bits = (1UL << (imm_s + 1)) - 1;
    return RotateRight(bits, imm_r, 64);
  } else {
    // Narrower element: the leading bits of imm_s encode the element width.
    if ((imm_s >> 1) == 0x1F) return 0;
    for (int width = 0x20; width >= 0x2; width >>= 1) {
      if ((imm_s & width) == 0) {
        int mask = width - 1;
        if ((imm_s & mask) == mask) return 0;
        uint64_t bits = (1UL << ((imm_s & mask) + 1)) - 1;
        return RepeatBitsAcrossReg(reg_size,
                                   RotateRight(bits, imm_r & mask, width),
                                   width);
      }
    }
  }
  UNREACHABLE();
  return 0;
}

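// Decode the 8-bit ImmFP field into a single-precision value:
//  ImmFP:  abcdefgh
//  Single: aBbb.bbbc.defg.h000 followed by 16 zero bits, where B is b ^ 1.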
float Instruction::ImmFP32() {
  uint32_t bits = ImmFP();
  uint32_t bit7 = (bits >> 7) & 0x1;
  uint32_t bit6 = (bits >> 6) & 0x1;
  uint32_t bit5_to_0 = bits & 0x3f;
  uint32_t result = (bit7 << 31) | ((32 - bit6) << 25) | (bit5_to_0 << 19);

  return rawbits_to_float(result);
}

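// Decode the 8-bit ImmFP field into a double-precision value:
//  ImmFP:  abcdefgh
//  Double: aBbb.bbbb.bbcd.efgh followed by 48 zero bits, where B is b ^ 1.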
double Instruction::ImmFP64() {
  uint32_t bits = ImmFP();
  uint64_t bit7 = (bits >> 7) & 0x1;
  uint64_t bit6 = (bits >> 6) & 0x1;
  uint64_t bit5_to_0 = bits & 0x3f;
  uint64_t result = (bit7 << 63) | ((256 - bit6) << 54) | (bit5_to_0 << 48);

  return rawbits_to_double(result);
}

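// Patch the immediate field of a branch so that it targets `target`. The byte
// distance is converted to an instruction count before being encoded.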
void Instruction::SetBranchImmTarget(Instruction* target) {
  ASSERT(IsAligned(DistanceTo(target), kInstructionSize));
  Instr branch_imm = 0;
  uint32_t imm_mask = 0;
  ptrdiff_t offset = DistanceTo(target) >> kInstructionSizeLog2;
  switch (BranchType()) {
    case CondBranchType:
      branch_imm = Assembler::ImmCondBranch(offset);
      imm_mask = ImmCondBranch_mask;
      break;
    case UncondBranchType:
      branch_imm = Assembler::ImmUncondBranch(offset);
      imm_mask = ImmUncondBranch_mask;
      break;
    case CompareBranchType:
      branch_imm = Assembler::ImmCmpBranch(offset);
      imm_mask = ImmCmpBranch_mask;
      break;
    case TestBranchType:
      branch_imm = Assembler::ImmTestBranch(offset);
      imm_mask = ImmTestBranch_mask;
      break;
    default:
      UNREACHABLE();
  }
  SetInstructionBits(Mask(~imm_mask) | branch_imm);
}

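// Patch the offset of an LDR (literal) so that it loads from `source`. The
// byte distance is scaled by kLiteralEntrySizeLog2 before being encoded.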
void Instruction::SetImmLLiteral(Instruction* source) {
  ASSERT(IsAligned(DistanceTo(source), kInstructionSize));
  ptrdiff_t offset = DistanceTo(source) >> kLiteralEntrySizeLog2;
  Instr imm = Assembler::ImmLLiteral(offset);
  Instr mask = ImmLLiteral_mask;
  SetInstructionBits(Mask(~mask) | imm);
}

uint64_t InstructionSequence::InlineData() const {
  ASSERT(IsInlineData());
  // Inline data is encoded as a single movz writing to xzr, so the payload is
  // the 16-bit move-wide immediate.
  uint64_t payload = ImmMoveWide();
  return payload;
}

#endif  // V8_TARGET_ARCH_ARM64