#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"
#include "macro-assembler.h"

namespace v8 {
namespace internal {
// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  ASSERT(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  ASSERT(masm->has_frame());
  masm->set_has_frame(false);
}
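// Usage sketch (illustrative, not part of the original file): code stubs
// that call into the runtime bracket the call with this helper so the
// MacroAssembler's frame bookkeeping stays consistent:
//
//   StubRuntimeCallHelper helper;
//   helper.BeforeCall(masm);   // Enters an internal frame, sets has_frame.
//   /* ... emit the runtime call ... */
//   helper.AfterCall(masm);    // Leaves the frame, clears has_frame.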
#define __ masm.


UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) {
    // Fall back to the library function if the buffer cannot be allocated.
    switch (type) {
      case TranscendentalCache::SIN: return &sin;
      case TranscendentalCache::COS: return &cos;
      case TranscendentalCache::TAN: return &tan;
      case TranscendentalCache::LOG: return &log;
      default: UNIMPLEMENTED();
    }
  }

  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // xmm0: raw double input. Spill it so the x87 unit can load it, generate
  // the transcendental operation, then move the result back into xmm0.
  __ push(rbx);
  __ push(rdi);
  __ movq(rbx, xmm0);
  __ push(rbx);
  __ fld_d(Operand(rsp, 0));
  TranscendentalCacheStub::GenerateOperation(&masm, type);
  __ fstp_d(Operand(rsp, 0));
  __ pop(rbx);
  __ movq(xmm0, rbx);
  __ pop(rdi);
  __ pop(rbx);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  // The generated code must contain no relocatable addresses.
  ASSERT(desc.reloc_size == 0);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}
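// Usage sketch (illustrative, not part of the original file): the creator
// returns a plain C function pointer into the generated buffer, so callers
// invoke the stub like any ordinary function:
//
//   UnaryMathFunction fast_sin =
//       CreateTranscendentalFunction(TranscendentalCache::SIN);
//   double s = fast_sin(1.0);  // Runs the generated x87 code.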
UnaryMathFunction CreateSqrtFunction() {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &sqrt;  // Fall back to the library function.

  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // xmm0: raw double input; the result is returned in xmm0 as well.
  __ sqrtsd(xmm0, xmm0);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  ASSERT(desc.reloc_size == 0);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}
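// Usage sketch (illustrative, not part of the original file): the fallback
// above means callers cannot tell whether they got the generated stub or
// libm's sqrt:
//
//   UnaryMathFunction fast_sqrt = CreateSqrtFunction();
//   double r = fast_sqrt(2.0);  // sqrtsd stub, or sqrt(2.0) as fallback.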
#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define custom fmod implementation.
ModuloFunction CreateModuloFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(
      OS::Allocate(Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler masm(NULL, buffer, static_cast<int>(actual_size));
  // Windows 64 passes the double arguments in xmm0 and xmm1 and returns the
  // result in xmm0. Compute x mod y: load y and x onto the x87 stack, using
  // the argument backing store above the return address as scratch space.
  __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
  __ movsd(Operand(rsp, kPointerSize), xmm0);
  __ fld_d(Operand(rsp, kPointerSize * 2));
  __ fld_d(Operand(rsp, kPointerSize));

  // Clear pending Invalid Operand and Zero Division exception flags.
  Label no_exceptions;
  __ fwait();
  __ fnstsw_ax();
  __ testb(rax, Immediate(5));
  __ j(zero, &no_exceptions);
  __ fnclex();
  __ bind(&no_exceptions);

  // Compute st(0) mod st(1). fprem may deliver only a partial remainder;
  // loop while the C2 status flag signals an incomplete result.
  Label partial_remainder_loop;
  __ bind(&partial_remainder_loop);
  __ fprem();
  __ fwait();
  __ fnstsw_ax();
  __ testl(rax, Immediate(0x400 /* C2 */));
  __ j(not_zero, &partial_remainder_loop);

  Label valid_result;
  Label return_result;
  // If Invalid Operand or Zero Division was raised, return NaN.
  __ testb(rax, Immediate(5));
  __ j(zero, &valid_result);
  __ fstp(0);  // Drop the result in st(0).
  int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
  __ movq(rcx, kNaNValue, RelocInfo::NONE);
  __ movq(Operand(rsp, kPointerSize), rcx);
  __ movsd(xmm0, Operand(rsp, kPointerSize));
  __ jmp(&return_result);

  // Otherwise move the valid result into xmm0.
  __ bind(&valid_result);
  __ fstp_d(Operand(rsp, kPointerSize));
  __ movsd(xmm0, Operand(rsp, kPointerSize));

  // Clean up the FPU stack and exception flags, then return.
  __ bind(&return_result);
  __ fstp(0);  // Unload y.

  Label clear_exceptions;
  __ testb(rax, Immediate(0x3f /* any exception */));
  __ j(not_zero, &clear_exceptions);
  __ ret(0);
  __ bind(&clear_exceptions);
  __ fnclex();
  __ ret(0);

  CodeDesc desc;
  masm.GetCode(&desc);
  OS::ProtectCode(buffer, actual_size);
  // Call the function from C++ through this pointer.
  return FUNCTION_CAST<ModuloFunction>(buffer);
}
#endif
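// Usage sketch (illustrative, not part of the original file):
//
//   ModuloFunction mod = CreateModuloFunction();
//   mod(5.5, 2.0);   // 1.5, same result as fmod(5.5, 2.0).
//   mod(5.5, 0.0);   // NaN: the FPU exception flags tested above trigger
//                    // the kNaNValue path instead of a valid result.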
#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm) {
  // rbx: target map, rdx: receiver. Install the transitioned map on the
  // receiver and record the write for the incremental marker.
  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
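// Note (illustrative, not part of the original file): every pointer store
// into a heap object in these generators follows this two-step pattern:
//
//   __ movq(FieldOperand(object, offset), value);              // the store
//   __ RecordWriteField(object, offset, value, scratch, ...);  // barrier
//
// The barrier keeps the remembered set and incremental marking consistent
// with the newly written pointer.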
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Label* fail) {
  // rbx: target map, rdx: receiver.
  Label allocated, new_backing_store, only_change_map, done;

  // Empty arrays need only a map transition, not a new backing store.
  __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  // COW arrays always get a fresh backing store.
  __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
                 Heap::kFixedCOWArrayMapRootIndex);
  __ j(equal, &new_backing_store);
  // A backing store in old pointer space must be replaced as well; one in
  // new space can be converted in place since it has the same size.
  __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store);

  // Convert in place: destination array equals source array.
  __ movq(r14, r8);
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);

  __ bind(&allocated);
  // Set the transitioned map.
  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx, HeapObject::kMapOffset, rbx, rdi,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  Label loop, entry, convert_hole;
  __ movq(r15, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE);
  // r15: the hole NaN.
  __ jmp(&entry);

  // Allocate a new FixedDoubleArray backing store in new space.
  __ bind(&new_backing_store);
  __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
  __ AllocateInNewSpace(rdi, r14, r11, r15, fail, TAG_OBJECT);
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
  // Install it as the receiver's elements and set its length.
  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r14);
  __ movq(r11, r14);
  __ RecordWriteField(rdx, JSObject::kElementsOffset, r11, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ Integer32ToSmi(r11, r9);
  __ movq(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11);
  __ jmp(&allocated);

  __ bind(&only_change_map);
  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx, HeapObject::kMapOffset, rbx, rdi,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ jmp(&done);

  // Conversion loop: smis become doubles, holes become the hole NaN.
  __ bind(&loop);
  __ movq(rbx,
          FieldOperand(r8, r9, times_pointer_size, FixedArray::kHeaderSize));
  __ JumpIfNotSmi(rbx, &convert_hole);
  __ SmiToInteger32(rbx, rbx);
  __ cvtlsi2sd(xmm0, rbx);
  __ movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    // The only non-smi that may appear in a smi-only array is the hole.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Assert(equal, "object found in smi-only array");
  }
  __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);

  __ bind(&entry);
  __ decq(r9);
  __ j(not_sign, &loop);

  __ bind(&done);
}
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Label* fail) {
  // rbx: target map, rdx: receiver.
  Label loop, entry, convert_hole, gc_required, only_change_map;

  // Empty arrays need only a map transition.
  __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  __ push(rax);
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  // Allocate a new FixedArray backing store (r11) of the same length.
  __ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
  __ AllocateInNewSpace(rdi, r11, r14, r15, &gc_required, TAG_OBJECT);
  __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(r11, HeapObject::kMapOffset), rdi);
  __ Integer32ToSmi(r14, r9);
  __ movq(FieldOperand(r11, FixedArray::kLengthOffset), r14);

  // Prepare for the conversion loop.
  __ movq(rsi, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE);
  __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex);
  // rsi: the hole NaN; rdi: pointer to the-hole object.
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ pop(rax);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ jmp(fail);

  // Box doubles into heap numbers.
  __ bind(&loop);
  __ movq(r14, FieldOperand(r8, r9, times_pointer_size,
                            FixedDoubleArray::kHeaderSize));
  __ cmpq(r14, rsi);
  __ j(equal, &convert_hole);

  // Non-hole double: copy the value into a new heap number.
  __ AllocateHeapNumber(rax, r15, &gc_required);
  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14);
  __ movq(FieldOperand(r11, r9, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ movq(r15, r9);
  __ RecordWriteArray(r11, rax, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ movq(FieldOperand(r11, r9, times_pointer_size, FixedArray::kHeaderSize),
          rdi);

  __ bind(&entry);
  __ decq(r9);
  __ j(not_sign, &loop);

  // Install the new backing store on the receiver.
  __ movq(FieldOperand(rdx, JSObject::kElementsOffset), r11);
  __ RecordWriteField(rdx, JSObject::kElementsOffset, r11, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ pop(rax);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // Set the transitioned map.
  __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx, HeapObject::kMapOffset, rbx, rdi,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
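// Per-element sketch (illustrative, not part of the original file): the
// loop above is the inverse of the smi-to-double conversion:
//
//   uint64_t bits = double_elements[i];
//   new_elements[i] = (bits == kHoleNanInt64)
//       ? the_hole             // LoadRoot(kTheHoleValueRootIndex)
//       : NewHeapNumber(bits)  // boxed; write barrier via RecordWriteArray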
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into the result register.
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ testb(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ testb(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: add the slice offset to the index and load the parent.
  Label indirect_string_loaded;
  __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ addq(index, result);
  __ movq(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings. Only flat cons strings, whose second part is the
  // empty string, are handled inline; anything else is flattened in the
  // runtime first.
  __ bind(&cons_string);
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, call_runtime);
  __ movq(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  // Reload the instance type of the underlying string.
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings; only these two
  // representations can reach this point.
  Label seq_string;
  __ bind(&check_sequential);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label ascii_external, done;
  if (FLAG_debug_code) {
    // Indirect strings were handled above; sequential were just ruled out.
    __ testb(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, "external string expected, but not found");
  }
  // Short external strings do not cache a resource-data pointer.
  __ testb(result, Immediate(kShortExternalStringTag));
  __ j(not_zero, call_runtime);
  // Load the character from the cached resource data.
  __ testb(result, Immediate(kStringEncodingMask));
  __ movq(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &ascii_external, Label::kNear);
  // Two-byte external string.
  __ movzxwl(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&ascii_external);
  // ASCII external string.
  __ movzxbl(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Sequential strings: dispatch on the encoding.
  Label ascii;
  __ bind(&seq_string);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii, Label::kNear);
  // Two-byte sequential string.
  __ movzxwl(result, FieldOperand(string, index, times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);
  // ASCII sequential string.
  __ bind(&ascii);
  __ movzxbl(result, FieldOperand(string, index, times_1,
                                  SeqAsciiString::kHeaderSize));
  __ bind(&done);
}
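// Control-flow sketch (illustrative, not part of the original file): the
// dispatch above, written as ordinary C++ pseudocode:
//
//   if (type & kIsIndirectStringMask) {        // slice or cons
//     if (type & kSlicedNotConsMask) { index += offset; string = parent; }
//     else if (second != empty) goto runtime; else string = first;
//     type = instance_type(string);            // reload for the flat string
//   }
//   if (type & kStringRepresentationMask) {    // external
//     if (type & kShortExternalStringTag) goto runtime;
//     base = resource_data(string);
//   } else {
//     base = payload(string);                  // sequential
//   }
//   result = two_byte ? ((uint16_t*)base)[index] : ((uint8_t*)base)[index];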
#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64