#if defined(V8_TARGET_ARCH_X64)

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  int num_extra_args = 0;
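
  // rax holds the actual argument count. It is bumped to include the
  // receiver and any extra arguments before tail-calling the C++ builtin.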
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
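
// Loads the code attached to the SharedFunctionInfo of the function in rdi
// and tail-jumps to it.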
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {

void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  GenerateTailCallToSharedCode(masm);
}

void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kParallelRecompile, 1);

  GenerateTailCallToSharedCode(masm);
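
// Shared body of the construct stubs. With FLAG_inline_new the new object is
// allocated inline, falling back to Runtime::kNewObject; the constructor is
// then invoked and either the allocated object or the constructor's result
// is returned. count_constructions enables in-object slack tracking.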
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  ASSERT(!is_api_function || !count_constructions);

  FrameScope scope(masm, StackFrame::CONSTRUCT);

  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
#endif
    __ JumpIfSmi(rax, &rt_call);

    if (count_constructions) {
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

    __ AllocateInNewSpace(rdi,

    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);

    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);

    if (count_constructions) {
      if (FLAG_debug_code) {
                  "Unexpected number of pre-allocated property fields.");
      }
      __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);

    __ Assert(positive, "Property allocation count failed.");

    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);

    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);

    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);

  __ CallRuntime(Runtime::kNewObject, 1);

  if (is_api_function) {
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                  CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
  } else {
    ParameterCount actual(rax);

  if (!is_api_function && !count_constructions) {
    masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
  }

  Label use_receiver, exit;
  __ JumpIfSmi(rax, &use_receiver);

  __ bind(&use_receiver);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
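
// The three construct-stub entry points differ only in the flags passed to
// the shared helper above.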
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
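
// Copies the arguments from argv onto the stack and invokes the function,
// as a construct call or a normal call depending on is_construct.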
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  __ addq(rcx, Immediate(1));

  ParameterCount actual(rax);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kLazyCompile, 1);

void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kLazyRecompile, 1);

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);

  Label not_no_registers, not_tos_rax;
  __ bind(&not_no_registers);

  __ bind(&not_tos_rax);
  __ Abort("no cases left");

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kNotifyOSR, 0);
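
// Implements Function.prototype.call: checks that the callee is actually a
// function, patches the receiver when required (null and undefined become the
// global receiver, other primitives get wrapped), shifts the arguments down
// by one, and dispatches to the function, a function proxy, or the
// CALL_NON_FUNCTION builtin.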
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  __ Push(masm->isolate()->factory()->undefined_value());

  Label slow, non_function;
  __ JumpIfSmi(rdi, &non_function);

  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    __ bind(&convert_to_object);
    __ jmp(&patch_receiver, Label::kNear);

    __ bind(&use_global_receiver);
    const int kGlobalIndex =

    __ bind(&patch_receiver);

    __ jmp(&shift_arguments);

  __ bind(&non_function);

  __ bind(&shift_arguments);

  { Label function, non_proxy;
    __ cmpq(rdx, Immediate(1));

    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
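
// Implements Function.prototype.apply: validates the arguments array against
// the stack limit, pushes its elements one at a time via the keyed-load IC,
// and then invokes the function (or the CALL_FUNCTION_PROXY builtin for
// proxies).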
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));

  __ push(Operand(rbp, kFunctionOffset));

  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(Immediate(0));

  __ movq(rbx, Operand(rbp, kReceiverOffset));

  __ movq(rdi, Operand(rbp, kFunctionOffset));

  Label call_to_object, use_global_receiver;
  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  __ bind(&call_to_object);
  __ jmp(&push_receiver, Label::kNear);

  __ bind(&use_global_receiver);
  const int kGlobalOffset =

  __ bind(&push_receiver);

  __ movq(rax, Operand(rbp, kIndexOffset));

  __ movq(rdx, Operand(rbp, kArgumentsOffset));

  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedLoadIC_Initialize();
  __ Call(ic, RelocInfo::CODE_TARGET);

  __ movq(rax, Operand(rbp, kIndexOffset));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ cmpq(rax, Operand(rbp, kLimitOffset));

  ParameterCount actual(rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));

  frame_scope.GenerateLeaveFrame();
  __ ret(3 * kPointerSize);

  __ bind(&call_proxy);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);

  __ ret(3 * kPointerSize);
}
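
// Allocates an empty JSArray in new space, together with a fixed-array
// backing store of JSArray::kPreallocatedArrayElements elements (when that
// constant is non-zero), filled with the hole value.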
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);

  if (initial_capacity > 0) {

  __ AllocateInNewSpace(size,

  Factory* factory = masm->isolate()->factory();
          factory->empty_fixed_array());

  if (initial_capacity == 0) {
            factory->empty_fixed_array());

          factory->fixed_array_map());

  static const int kLoopUnfoldLimit = 4;
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {

  __ movq(scratch2, Immediate(initial_capacity));
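
// Allocates a JSArray whose length and backing-store size are taken from
// array_size (a smi). When fill_with_hole is set the elements are
// initialized with the hole value so the result can be used as a
// FAST_ELEMENTS array.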
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,
                            Register array_size,
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  __ LoadInitialArrayMap(array_function, scratch,
                         elements_array, fill_with_hole);

  if (FLAG_debug_code) {
    __ testq(array_size, array_size);
    __ Assert(not_zero, "array size is unexpectedly 0");
  }

  Factory* factory = masm->isolate()->factory();
  __ Move(elements_array, factory->empty_fixed_array());

          factory->fixed_array_map());

  if (fill_with_hole) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ lea(elements_array, Operand(elements_array,

    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));

    __ cmpq(elements_array, elements_array_end);
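
// Fast path for the Array constructor: handles the zero-argument,
// single-argument (length) and multi-argument cases, bailing out to
// call_generic_code whenever the arguments cannot be handled here (e.g. a
// non-smi length or, with FLAG_smi_only_arrays, element values that would
// require an elements-kind transition).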
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;

  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->array_function_native(), 1);

  __ ret(kPointerSize);

  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));

  __ movq(rdx, Operand(rsp, kPointerSize));

  __ movq(rax, Immediate(0));
  __ jmp(&empty_array);

  __ bind(&not_empty_array);
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  AllocateJSArray(masm,

  __ IncrementCounter(counters->array_function_native(), 1);

  __ ret(2 * kPointerSize);

  __ bind(&argc_two_or_more);

  AllocateJSArray(masm,

  __ IncrementCounter(counters->array_function_native(), 1);

  __ lea(r9, Operand(rsp, kPointerSize));

  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(r8, &has_non_smi_element);

  __ addq(rdx, Immediate(kPointerSize));

  __ bind(&has_non_smi_element);
              masm->isolate()->factory()->heap_number_map(),

  __ bind(&cant_transition_map);
  __ UndoAllocationInNewSpace(rbx);
  __ jmp(call_generic_code);

  __ bind(&not_double);

      &cant_transition_map);

  __ addq(rdx, Immediate(kPointerSize));
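
// The InternalArray and Array entry points below share ArrayNativeCode and
// fall back to their generic code stubs when the fast path bails out.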
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  Label generic_array_code;

  if (FLAG_debug_code) {
    __ Check(not_smi, "Unexpected initial map for InternalArray function");
    __ Check(equal, "Unexpected initial map for InternalArray function");
  }

  ArrayNativeCode(masm, &generic_array_code);

  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  Label generic_array_code;

  if (FLAG_debug_code) {
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ Check(equal, "Unexpected initial map for Array function");
  }

  ArrayNativeCode(masm, &generic_array_code);

  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  Label generic_constructor;

  if (FLAG_debug_code) {
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ Check(equal, "Unexpected initial map for Array function");
  }

  ArrayNativeCode(masm, &generic_constructor);

  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
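
// Construction of String wrapper objects: uses the number-to-string cache and
// a fast path for string arguments, returns a wrapper around the empty string
// when called without arguments, and falls back to Runtime::kNewStringWrapper
// when inline allocation fails.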
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ Assert(equal, "Unexpected String function");
  }

  __ j(zero, &no_arguments);

  Label not_cached, argument_is_string;

  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ Assert(equal, "Unexpected string wrapper instance size");
    __ Assert(equal, "Unexpected unused properties of string wrapper");
  }

  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);

  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(rax, &convert_argument);

  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);

  __ jmp(&argument_is_string);

  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);

  __ lea(rsp, Operand(rsp, kPointerSize));

  __ jmp(&argument_is_string);

  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);

  __ CallRuntime(Runtime::kNewStringWrapper, 1);
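
// Helpers that build and tear down the frame used by the arguments adaptor.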
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
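
// Bridges calls where the actual argument count (rax) does not match the
// formal parameter count (rbx): copies the actual arguments into an adaptor
// frame, padding missing parameters with undefined, before jumping to the
// function's code.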
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ j(equal, &dont_adapt_arguments);

  EnterArgumentsAdaptorFrame(masm);
  __ push(Operand(rax, 0));
  __ subq(rax, Immediate(kPointerSize));

  EnterArgumentsAdaptorFrame(masm);
  __ push(Operand(rdi, 0));
  __ subq(rdi, Immediate(kPointerSize));

  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  LeaveArgumentsAdaptorFrame(masm);

  __ bind(&dont_adapt_arguments);
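
// On-stack replacement entry point: asks the runtime to compile optimized
// code for the current function and, if that succeeds, jumps into it; a
// pending interrupt is handled by tail-calling StackCheckStub first.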
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);

  __ bind(&stack_check);
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);

  StackCheckStub stub;
  __ TailCallStub(&stub);
  if (FLAG_debug_code) {
    __ Abort("Unreachable code: returned from tail call.");
  }

#endif  // V8_TARGET_ARCH_X64