#if V8_TARGET_ARCH_X64

#define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm,
  int num_extra_args = 0;
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
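// Judging by the retained lines, the adaptor bumps the argument count in rax
// by one for the receiver (plus any extra builtin arguments) and then
// tail-calls into the C++ builtin through JumpToExternalReference.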
static void CallRuntimePassFunction(
  __ CallRuntime(function_id, 1);

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);
  GenerateTailCallToSharedCode(masm);
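// The stack-limit check above appears to serve as a cheap interrupt cue: when
// rsp is below the limit the builtin tries kHiddenTryInstallOptimizedCode and
// tail-calls whatever code the runtime returns; otherwise it simply
// tail-calls the function's shared (unoptimized) code.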
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  ASSERT(!is_api_function || !count_constructions);
  ASSERT(!is_api_function || !create_memento);
  ASSERT(!count_constructions || !create_memento);
  FrameScope scope(masm, StackFrame::CONSTRUCT);
  if (create_memento) {
    __ AssertUndefinedOrAllocationSite(rbx);
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ JumpIfSmi(rax, &rt_call);
    if (count_constructions) {
      __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
    if (create_memento) {
    Factory* factory = masm->isolate()->factory();
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    if (count_constructions) {
      if (FLAG_debug_code) {
                  kUnexpectedNumberOfPreAllocatedPropertyFields);
      __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
    } else if (create_memento) {
      Handle<Map> allocation_memento_map = factory->allocation_memento_map();
              allocation_memento_map);
    __ Assert(positive, kPropertyAllocationCountFailed);
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  if (create_memento) {
  if (create_memento) {
    __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
    __ CallRuntime(Runtime::kHiddenNewObject, 1);
  Label count_incremented;
  if (create_memento) {
    __ jmp(&count_incremented);
  if (create_memento) {
    __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
    __ j(equal, &count_incremented);
  __ bind(&count_incremented);
  if (is_api_function) {
        masm->isolate()->builtins()->HandleApiCallConstruct();
    __ Call(code, RelocInfo::CODE_TARGET);
    ParameterCount actual(rax);
  if (!is_api_function && !count_constructions) {
    masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
  Label use_receiver, exit;
  __ JumpIfSmi(rax, &use_receiver);
  __ bind(&use_receiver);
  __ PopReturnAddressTo(rcx);
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
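// Taken together, the retained lines sketch the construct stub: try an inline
// new-space allocation when FLAG_inline_new allows it (optionally appending an
// AllocationMemento), otherwise fall back to the kHiddenNewObject /
// kHiddenNewObjectWithAllocationSite runtime calls, invoke the constructor,
// and return either the constructor's object result or the allocated receiver.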
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
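// The three public construct stubs appear to differ only in the flags they
// pass to the helper: Countdown counts constructions for in-object slack
// tracking, Generic may allocate an AllocationMemento when
// FLAG_pretenuring_call_new is set, and Api routes the invocation through
// HandleApiCallConstruct.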
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
  __ addp(rcx, Immediate(1));
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ParameterCount actual(rax);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);

static void CallCompileOptimized(MacroAssembler* masm,
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
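// Both optimized-compilation entry points appear to push a boolean selecting
// concurrent vs. synchronous compilation, call kHiddenCompileOptimized, and
// then tail-call whatever code object the runtime returns.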
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2);
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \

#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
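// Code-age support: GenerateMakeCodeYoungAgainCommon seems to rewind the
// return address on the stack past the patched 5-byte call sequence so the
// re-youngened code is re-executed, then calls out to the C++ helper behind
// get_make_code_young_function. The macro stamps out even/odd-marking
// variants for every entry in CODE_AGE_LIST.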
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2);
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
  Label not_no_registers, not_tos_rax;
  __ bind(&not_no_registers);
  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
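// The Notify* builtins appear to share Generate_NotifyDeoptimizedHelper,
// calling kHiddenNotifyDeoptimized and then dispatching on the full-codegen
// state (no registers vs. a result in rax), aborting with kNoCasesLeft if
// neither case matches.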
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  __ PopReturnAddressTo(rbx);
  __ Push(masm->isolate()->factory()->undefined_value());
  __ PushReturnAddressFrom(rbx);
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    __ movp(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ bind(&convert_to_object);
    __ movp(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);
    __ bind(&use_global_receiver);
    __ bind(&patch_receiver);
    __ movp(args.GetArgumentOperand(1), rbx);
    __ jmp(&shift_arguments);
  __ bind(&non_function);
  __ movp(args.GetArgumentOperand(1), rdi);
  __ bind(&shift_arguments);
  { Label function, non_proxy;
    __ cmpp(rdx, Immediate(1));
    __ PopReturnAddressTo(rdx);
    __ PushReturnAddressFrom(rdx);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);
  ParameterCount expected(0);
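// The tail of Generate_FunctionCall (only partly shown) patches up a
// non-callable or missing receiver, shifts the arguments down over the
// receiver slot, and then either invokes the function directly, jumps to the
// ArgumentsAdaptorTrampoline on an arity mismatch, or dispatches to the
// CALL_FUNCTION_PROXY / CALL_NON_FUNCTION builtins.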
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
  static const int kFunctionOffset = kReceiverOffset + kPointerSize;
  __ Push(Operand(rbp, kFunctionOffset));
  __ Push(Operand(rbp, kArgumentsOffset));
  __ Push(Operand(rbp, kFunctionOffset));
  const int kLimitOffset =
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ Push(Immediate(0));
  __ movp(rbx, Operand(rbp, kReceiverOffset));
  Label push_receiver;
  __ movp(rdi, Operand(rbp, kFunctionOffset));
  Label call_to_object, use_global_receiver;
  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ bind(&call_to_object);
  __ jmp(&push_receiver, Label::kNear);
  __ bind(&use_global_receiver);
  __ bind(&push_receiver);
  __ movp(rax, Operand(rbp, kIndexOffset));
  __ movp(rdx, Operand(rbp, kArgumentsOffset));
      masm->isolate()->builtins()->KeyedLoadIC_Initialize();
  __ Call(ic, RelocInfo::CODE_TARGET);
  __ movp(rax, Operand(rbp, kIndexOffset));
  __ movp(Operand(rbp, kIndexOffset), rax);
  __ cmpp(rax, Operand(rbp, kLimitOffset));
  ParameterCount actual(rax);
  __ movp(rdi, Operand(rbp, kFunctionOffset));
  frame_scope.GenerateLeaveFrame();
  __ ret(3 * kPointerSize);
  __ bind(&call_proxy);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ ret(3 * kPointerSize);
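// Function.prototype.apply support: judging by the kIndexOffset/kLimitOffset
// slots, the loop loads each element of the arguments object through the
// keyed-load IC, pushes it, and bumps the index until it reaches the limit,
// then invokes the function (or the CALL_FUNCTION_PROXY builtin for proxies)
// and returns, popping the three words pushed for the call.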
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  Label generic_array_code;
  if (FLAG_debug_code) {
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  Label generic_array_code;
  if (FLAG_debug_code) {
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
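// Both array builtins verify the function's initial map under FLAG_debug_code
// and then tail-call the matching constructor stub; the JS Array entry also
// clears rbx (which appears to stand in for the AllocationSite) to undefined
// beforehand.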
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);
  if (FLAG_debug_code) {
    __ Assert(equal, kUnexpectedStringFunction);
  StackArgumentsAccessor args(rsp, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ PushReturnAddressFrom(rcx);
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(rax, &convert_argument);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  __ jmp(&argument_is_string);
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  __ CallRuntime(Runtime::kNewStringWrapper, 1);
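// String constructor: with an argument the code first consults the
// number-string cache, uses a string argument as-is, or falls back to a
// conversion path; with no arguments it substitutes the empty string. The
// gc_required path appears to defer to Runtime::kNewStringWrapper to build
// the wrapper object.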
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);
  Label enough, too_few;
  __ j(equal, &dont_adapt_arguments);
  EnterArgumentsAdaptorFrame(masm);
  __ Push(Operand(rax, 0));
  __ subp(rax, Immediate(kPointerSize));
  EnterArgumentsAdaptorFrame(masm);
  __ Push(Operand(rdi, 0));
  __ subp(rdi, Immediate(kPointerSize));
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
  LeaveArgumentsAdaptorFrame(masm);
  __ bind(&dont_adapt_arguments);
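// The adaptor trampoline appears to copy the actual arguments into the frame
// layout the callee expects for both the "enough" and "too_few" cases (the
// undefined padding for the latter is not shown here), record the deopt PC
// offset, tear the adaptor frame back down, and jump straight to the callee
// when no adaptation is needed.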
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  __ cmpp(rax, Immediate(0));

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);
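// On-stack replacement: Generate_OnStackReplacement asks the runtime for OSR
// code via kCompileForOnStackReplacement and checks whether it got a code
// object back, while Generate_OsrAfterStackCheck appears to re-test the stack
// limit, call kHiddenStackGuard if an interrupt is pending, and then jump to
// the regular OnStackReplacement builtin.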
#endif  // V8_TARGET_ARCH_X64