#if V8_TARGET_ARCH_ARM

#define __ ACCESS_MASM(masm)
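
// Generate_Adaptor turns a builtin call into a call of the corresponding
// C++ runtime function. r0 holds the argument count (excluding the
// receiver); the extra_args mode decides whether the called function is
// passed along as an extra argument.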
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
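
// Helpers to load the InternalArray and Array functions of the current
// native context into |result|.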
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {


static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
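
// Entry point for the InternalArray function called as a normal function:
// sanity-check its initial map in debug mode, then tail-call the
// InternalArray constructor stub.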
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
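
// Same as above for the Array function; the undefined value loaded into r2
// stands in for a missing AllocationSite feedback slot.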
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
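
// Construct stub for new String(...): convert the first argument to a
// string (through the number-string cache or the TO_STRING builtin) and
// wrap it in a freshly allocated JSValue.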
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  __ sub(r0, r0, Operand(1));
  Register argument = r2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // Allocate a JSValue and put the tagged pointer into r0.
  Label gc_required;
  __ Allocate(JSValue::kSize, r0, r3, r4, &gc_required, TAG_OBJECT);

  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  // The argument is not a number: check whether it is already a string.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);
  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);
  // No arguments: use the empty string and drop the receiver.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);
  // The argument is already a string: call the runtime to create the wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
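
// Helpers for the compile builtins below: call a runtime function with the
// function object in r1 as its argument, then tail-call either the code
// object that the runtime returned or the function's shared (unoptimized)
// code.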
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  __ CallRuntime(function_id, 1);


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
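
// Entry for functions that are waiting in the optimization queue: install
// the optimized code if it is ready, otherwise continue in the shared code.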
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Check against the stack limit first, as a cue for a pending interrupt.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
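
// Shared body of the JSConstruct stubs: allocate the receiver (inline when
// FLAG_inline_new permits, otherwise via the runtime), optionally keep the
// slack-tracking construction count (count_constructions) or create an
// AllocationMemento (create_memento), invoke the constructor, and decide
// whether to return its result or the receiver.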
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);
  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);
  // Should never create mementos before slack tracking is finished.
  ASSERT(!count_constructions || !create_memento);

  Isolate* isolate = masm->isolate();
  // Enter a construct frame.
  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

  if (create_memento) {
    __ AssertUndefinedOrAllocationSite(r2, r3);
    __ push(r2);
  }
  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(isolate);
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(ne, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ JumpIfSmi(r2, &rt_call);
    if (count_constructions) {
      Label allocate;
      // Decrement the generous allocation count; once it reaches zero, the
      // runtime finalizes the instance size.
      __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      MemOperand constructor_count =
          FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
      __ ldrb(r4, constructor_count);
      __ sub(r4, r4, Operand(1), SetCC);
      __ strb(r4, constructor_count);
      __ b(ne, &allocate);
      __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
      __ bind(&allocate);
    }
    // Make room for the AllocationMemento if requested.
    if (create_memento) {
      __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
    }

    // Initialize the object: map, then empty fixed arrays for both
    // properties and elements.
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    // Initialize the in-object fields. When tracking constructions, the
    // pre-allocated fields get undefined and the remainder a one-pointer
    // filler map, so the instance size can later be truncated.
    if (count_constructions) {
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
      if (FLAG_debug_code) {
        __ cmp(r0, r4);
        __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
      }
      __ InitializeFieldsWithFiller(r5, r0, r6);
      // To allow truncation.
      __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
      __ InitializeFieldsWithFiller(r5, r0, r6);
    } else if (create_memento) {
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
      __ InitializeFieldsWithFiller(r5, r0, r6);
      // Fill in the AllocationMemento, map first.
      __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
      ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    } else {
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
      __ InitializeFieldsWithFiller(r5, r0, r6);
    }
    // Done if no extra properties are to be allocated; otherwise allocate a
    // FixedArray backing store and fill it with undefined.
    __ b(eq, &allocated);
    __ Assert(pl, kPropertyAllocationCountFailed);

    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);

    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    // Undo the allocation so the heap remains verifiable, then fall through
    // to the runtime call.
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  __ bind(&rt_call);
  if (create_memento) {
    __ ldr(r2, MemOperand(sp, 2 * kPointerSize));  // The allocation site.
    __ push(r2);
  }
  __ push(r1);  // The constructor: argument for Runtime_NewObject.
  if (create_memento) {
    __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
  } else {
    __ CallRuntime(Runtime::kHiddenNewObject, 1);
  }
  Label count_incremented;
  if (create_memento) {
    // The runtime call above already created the memento.
    __ jmp(&count_incremented);
  }

  // Receiver for the constructor call allocated.
  __ bind(&allocated);
  if (create_memento) {
    __ ldr(r2, MemOperand(sp, kPointerSize * 2));
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r5);
    __ b(eq, &count_incremented);
    // r2 is an AllocationSite: increment its memento create count.
    __ ldr(r3, FieldMemOperand(r2, AllocationSite::kPretenureCreateCountOffset));
    __ add(r3, r3, Operand(Smi::FromInt(1)));
    __ str(r3, FieldMemOperand(r2, AllocationSite::kPretenureCreateCountOffset));
  }

  __ bind(&count_incremented);
  // Call the function. r0: number of arguments, r1: constructor function.
  if (is_api_function) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    __ Call(code, RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
  }

  // Store the offset of the return address for the deoptimizer.
  if (!is_api_function && !count_constructions) {
    masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
  }
  // If the result is an object (in the ECMA sense), use it; otherwise use
  // the receiver from the stack (ECMA-262, section 13.2.2-7).
  Label use_receiver, exit;
  __ JumpIfSmi(r0, &use_receiver);
  __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &exit);

  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  __ bind(&exit);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
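
// Common entry trampoline, called from C++ with the function to run, its
// receiver and arguments: clears cp, sets up the root and callee-saved
// registers, and invokes the function (through the construct stub when
// is_construct is set).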
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  __ InitializeRootRegister();

  // Initialize the JavaScript callee-saved registers, since they will be
  // seen by the garbage collector as part of handlers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  if (!FLAG_enable_ool_constant_pool) {
    __ mov(r8, Operand(r4));
  }

  // Invoke the code, passing argc in r0 and the function in r1.
  if (is_construct) {
    // No type feedback cell is available.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
    __ CallStub(&stub);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
  }
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}
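
// Compile the function optimized; the boolean pushed as the second runtime
// argument selects concurrent (background-thread) compilation.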
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
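
// Code aging: these builtins are patched into the prologue of old code.
// They call out to a C function that rewrites the prologue back to its
// young state, passing the code address and the isolate.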
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::MANUAL);
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
      masm->isolate()), 2);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r1);
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
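
// Notify the deoptimizer of a stub failure (optionally preserving double
// registers) so execution can resume at the point of failure.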
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
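
// Notify the runtime of a deoptimization, then dispatch on the full-codegen
// state left on the stack: no registers, a top-of-stack register, or an
// unknown state, which stops execution.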
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);

  // Switch on the full-codegen state the deoptimizer left on the stack.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ Ret();

  __ bind(&with_tos_register);
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
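
// On-stack replacement: ask the runtime for optimized code and jump to its
// OSR entry point, which is computed from the code object's deoptimization
// data.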
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);

  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    if (FLAG_enable_ool_constant_pool) {
      __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset));
    }

    // Load the OSR entrypoint offset from the deoptimization data and
    // compute the target address: code object + header size + offset.
    __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
        DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ add(r0, r0, Operand::SmiUntag(r1));
    __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // Use a stack-limit check as a cue that compilation might be finished.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);
  __ bind(&ok);
  __ Ret();
}
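
// Function.prototype.call: make sure there is at least one argument, check
// that the callee really is a function (routing proxies and non-functions
// to the corresponding builtins), patch the receiver for sloppy-mode
// callees, shift the arguments down over the receiver, and invoke, going
// through the arguments adaptor when the argument counts disagree.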
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument; if not, push undefined as
  //    the receiver.
  { Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }
  // 2. Get the function to call (passed as receiver) from the stack and
  //    check that it really is a function.
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // Indicate a regular JS_FUNCTION.
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode or native functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);
    __ bind(&convert_to_object);
    // The receiver in r2 is converted through the TO_OBJECT builtin inside
    // an internal frame that preserves the argument count.
    __ mov(r4, Operand::Zero());  // Restore the JS_FUNCTION flag.
    __ jmp(&patch_receiver);

    __ bind(&use_global_receiver);
    __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ jmp(&shift_arguments);
  }
  // 3b. Check for a function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // Indicate a function proxy.
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // Indicate a non-function.
  // 4. Shift the arguments and the return address one slot down on the stack
  //    (overwriting the original receiver), and adjust the argument count.
  __ bind(&shift_arguments);
  __ sub(r0, r0, Operand(1));
  // 5a. Call a non-function via the CALL_NON_FUNCTION builtin, or a function
  //     proxy via CALL_FUNCTION_PROXY, depending on the flag in r4.
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // Re-add the proxy object as an additional argument.
    __ add(r0, r0, Operand(1));
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }
  // 5b. Tail-call through the arguments adaptor when the formal and actual
  //     argument counts differ; otherwise invoke the code directly.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
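
// Function.prototype.apply: check that the stack can hold the unpacked
// arguments, compute the receiver the same way Generate_FunctionCall does,
// copy the arguments out of the array one %GetProperty call at a time, and
// invoke the function (or the proxy builtin).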
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);

    // Check the stack for overflow: compare the space left to the space
    // needed for the (smi-tagged) argument count in r0.
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ sub(r2, sp, r2);
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));

    // Push the current limit and the initial index.
    __ push(r0);  // Limit.
    __ mov(r1, Operand::Zero());  // Initial index.
    __ push(r1);
    // Get the receiver and check that the function is a JS function
    // (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change the context eagerly to get the right global object if needed.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Do not transform the receiver for strict mode or native functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Check if the receiver is already a JavaScript object.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);
    // Convert the receiver to a regular object.
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_receiver);
    __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(r0);
    __ CallRuntime(Runtime::kGetProperty, 2);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ SmiUntag(r0);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);
    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // Add the function proxy as the last argument.
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand::Zero());
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}
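
// Helpers that build and tear down the ARGUMENTS_ADAPTOR frame used when
// the actual and the expected argument counts differ.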
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                   (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                   fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                 kPointerSize));
}
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Get the number of arguments passed (as a smi), then tear down the frame
  // and the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  if (FLAG_enable_ool_constant_pool) {
    __ add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    __ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    __ mov(sp, fp);
    __ ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // Adjust for the receiver.
}
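
// The adaptor trampoline itself: when the callee opts out of adaptation
// (kDontAdaptArgumentsSentinel) jump straight to its code entry; otherwise
// copy the actual arguments into an adaptor frame, padding with undefined
// when too few arguments were passed.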
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // r0: actual number of arguments, r1: function, r2: expected number of
  // arguments.
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // Calculate the copy start address (the last argument's address).
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  }

  // Call the entry point and store the return-address offset for the
  // deoptimizer.
  __ bind(&invoke);
  __ Call(r3);
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit the frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);

  // Don't adapt arguments: jump straight to the function's code entry.
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}
#endif  // V8_TARGET_ARCH_ARM