#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm)
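
// Adaptor into a C++ builtin: r0 holds the argument count excluding the
// receiver, so the +1 below accounts for the receiver before tail-calling the
// builtin identified by |id| through JumpToExternalReference.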
void Builtins::Generate_Adaptor(MacroAssembler* masm,
  int num_extra_args = 0;
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
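
// Load the built-in InternalArray function from the current context.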
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
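
// Load the built-in Array function from the current context into |result|.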
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
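
// Allocate an empty JSArray in new space, together with an elements backing
// store of length |initial_capacity| filled with the-hole values. Jumps to
// gc_required if the allocation fails.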
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Label* gc_required) {
  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);

  if (initial_capacity > 0) {
    __ AllocateInNewSpace(size,
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);

  if (initial_capacity == 0) {
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {
    __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
    __ cmp(scratch1, scratch2);
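
// Allocate a JSArray whose length is taken from the array_size register,
// together with its FixedArray backing store. If fill_with_hole is true the
// elements are initialized with the-hole values; on failure control jumps to
// gc_required.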
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Label* gc_required) {
  __ LoadInitialArrayMap(array_function, scratch2,
                         elements_array_storage, fill_with_hole);

  if (FLAG_debug_code) {
    __ tst(array_size, array_size);
    __ Assert(ne, "array size is unexpectedly 0");
  __ mov(elements_array_end,
  __ add(elements_array_end,
  __ AllocateInNewSpace(

  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
  __ str(elements_array_storage,
  __ sub(elements_array_storage,
         elements_array_storage,
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  __ add(elements_array_end,
         elements_array_storage,

  if (fill_with_hole) {
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ cmp(elements_array_storage, elements_array_end);
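
// Native code shared by the Array and InternalArray constructors: dispatches
// on the argument count (zero, one, or more) and allocates the JSArray
// directly in generated code, falling back to call_generic_code when that is
// not possible.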
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;
  __ b(ne, &argc_one_or_more);

  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,
  __ IncrementCounter(counters->array_function_native(), 1, r3, r4);

  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  __ b(ne, &not_empty_array);
  __ mov(r0, Operand(0));

  __ bind(&not_empty_array);
  __ b(ne, call_generic_code);
  __ b(ge, call_generic_code);
  AllocateJSArray(masm,
  __ IncrementCounter(counters->array_function_native(), 1, r2, r4);

  __ bind(&argc_two_or_more);
  AllocateJSArray(masm,
  __ IncrementCounter(counters->array_function_native(), 1, r2, r6);

  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(r2, &has_non_smi_element);

  __ bind(&has_non_smi_element);

  __ bind(&cant_transition_map);
  __ UndoAllocationInNewSpace(r3, r4);
  __ b(call_generic_code);

  __ bind(&not_double);
                                         &cant_transition_map);
  __ RecordWriteField(r3,
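
// Builtin entry for the InternalArray constructor: tries ArrayNativeCode
// first and falls back to the generic InternalArrayCodeGeneric stub.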
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    __ Assert(ne, "Unexpected initial map for InternalArray function");
    __ Assert(eq, "Unexpected initial map for InternalArray function");

  ArrayNativeCode(masm, &generic_array_code);

  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
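
// Builtin entry for the Array constructor when invoked as a function: tries
// ArrayNativeCode first and falls back to the generic ArrayCodeGeneric stub.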
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    __ Assert(ne, "Unexpected initial map for Array function");
    __ Assert(eq, "Unexpected initial map for Array function");

  ArrayNativeCode(masm, &generic_array_code);

  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
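
// Builtin entry for 'new Array(...)': tries ArrayNativeCode and falls back to
// the generic construct stub.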
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  Label generic_constructor;

  if (FLAG_debug_code) {
    __ Assert(ne, "Unexpected initial map for Array function");
    __ Assert(eq, "Unexpected initial map for Array function");

  ArrayNativeCode(masm, &generic_constructor);

  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
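
// Builtin entry for 'new String(...)': allocates a String wrapper object for
// the argument, consulting the number-string cache and converting non-string
// arguments as needed; falls back to Runtime::kNewStringWrapper when the
// allocation requires a GC.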
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ cmp(function, Operand(r2));
    __ Assert(eq, "Unexpected String function");
  __ b(eq, &no_arguments);
  __ sub(r0, r0, Operand(1));

  Register argument = r2;
  Label not_cached, argument_is_string;
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ Assert(eq, "Unexpected string wrapper instance size");
    __ Assert(eq, "Unexpected unused properties of string wrapper");
  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);

  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ b(&argument_is_string);

  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  __ CallRuntime(Runtime::kNewStringWrapper, 1);
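
// Tail-calls the code object stored in the function's SharedFunctionInfo.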
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {

void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  GenerateTailCallToSharedCode(masm);

void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kParallelRecompile, 1);
  GenerateTailCallToSharedCode(masm);
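
// Shared implementation of the JSConstructStub variants: allocates the new
// object (inline when FLAG_inline_new allows it, otherwise through
// Runtime::kNewObject), initializes its fields, and invokes the constructor.
// count_constructions enables the slack-tracking countdown used by the
// Countdown variant, and is_api_function routes the call through
// HandleApiCallConstruct.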
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  FrameScope scope(masm, StackFrame::CONSTRUCT);

  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(isolate);
    __ mov(r2, Operand(debug_step_in_fp));
    __ JumpIfSmi(r2, &rt_call);

    if (count_constructions) {
      __ ldrb(r4, constructor_count);
      __ strb(r4, constructor_count);
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    if (count_constructions) {
      if (FLAG_debug_code) {
        __ Assert(le, "Unexpected number of pre-allocated property fields.");
      __ InitializeFieldsWithFiller(r5, r0, r7);
      __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(r5, r6, r7);

    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");
    __ AllocateInNewSpace(
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);

    if (count_constructions) {
      __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    } else if (FLAG_debug_code) {
      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
      __ Assert(eq, "Undefined value not loaded.");

    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);

  __ CallRuntime(Runtime::kNewObject, 1);

  if (is_api_function) {
        masm->isolate()->builtins()->HandleApiCallConstruct();
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
    ParameterCount actual(r0);

  if (!is_api_function && !count_constructions) {
    masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());

  Label use_receiver, exit;
  __ JumpIfSmi(r0, &use_receiver);

  __ bind(&use_receiver);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
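
// Entry trampoline helper shared by the call and construct entry stubs: sets
// up the root register, copies the incoming arguments onto the stack, and
// invokes the function either as a regular call or as a construct call.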
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  __ InitializeRootRegister();
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ParameterCount actual(r0);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);

void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kLazyCompile, 1);

void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kLazyRecompile, 1);

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);

  Label with_tos_register, unknown_state;
  __ b(ne, &with_tos_register);

  __ bind(&with_tos_register);
  __ b(ne, &unknown_state);

  __ bind(&unknown_state);
  __ stop("no cases left");

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kNotifyOSR, 0);

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
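
// Generic function-call builtin: if the callee is not a JSFunction it is
// routed to the CALL_NON_FUNCTION or CALL_FUNCTION_PROXY builtins; otherwise
// the receiver is converted to an object when the function requires it, the
// arguments are shifted down to drop the callee, and the function is invoked.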
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  __ cmp(r0, Operand(0));
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ add(r0, r0, Operand(1));

  Label slow, non_function;
  __ JumpIfSmi(r1, &non_function);

  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    __ b(ne, &shift_arguments);
    __ b(ne, &shift_arguments);
    __ JumpIfSmi(r2, &convert_to_object);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ b(eq, &use_global_receiver);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);
    __ jmp(&patch_receiver);

    __ bind(&use_global_receiver);
    const int kGlobalIndex =

    __ bind(&patch_receiver);
    __ jmp(&shift_arguments);

  __ b(eq, &shift_arguments);
  __ bind(&non_function);

  __ bind(&shift_arguments);
  __ sub(r2, r2, Operand(kPointerSize));
  __ sub(r0, r0, Operand(1));

  { Label function, non_proxy;
    __ b(eq, &function);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
  ParameterCount expected(0);
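
// Implements Function.prototype.apply: checks for available stack space,
// converts the receiver when needed, copies the arguments object's elements
// onto the stack via Runtime::kGetProperty, and then invokes the function
// (or the CALL_FUNCTION_PROXY builtin for proxies).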
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);

  Label push_receiver;
  __ b(ne, &push_receiver);

  Label call_to_object, use_global_receiver;
  __ b(ne, &push_receiver);
  __ b(ne, &push_receiver);
  __ JumpIfSmi(r0, &call_to_object);
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ b(eq, &use_global_receiver);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  __ b(eq, &use_global_receiver);
  __ b(ge, &push_receiver);

  __ bind(&call_to_object);
  __ b(&push_receiver);

  __ bind(&use_global_receiver);
  const int kGlobalOffset =

  __ bind(&push_receiver);
  __ CallRuntime(Runtime::kGetProperty, 2);

  ParameterCount actual(r0);
  __ b(ne, &call_proxy);
  frame_scope.GenerateLeaveFrame();
  __ add(sp, sp, Operand(3 * kPointerSize));

  __ bind(&call_proxy);
  __ add(r0, r0, Operand(1));
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ add(sp, sp, Operand(3 * kPointerSize));
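
// Frame helpers for the arguments adaptor: Enter builds an adaptor frame
// (saving fp/lr and the argument count), Leave tears it down and drops the
// arguments, including the receiver, from the stack.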
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ add(fp, sp, Operand(3 * kPointerSize));

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ add(sp, sp, Operand(kPointerSize));
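
// Arguments adaptor trampoline: compares the actual argument count in r0 with
// the expected count in r2 and either calls the function directly
// (dont_adapt_arguments), copies the expected number of arguments (enough),
// or copies the actual arguments and pads the missing ones with undefined
// (too_few), all inside an adaptor frame.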
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ b(eq, &dont_adapt_arguments);

  EnterArgumentsAdaptorFrame(masm);
  __ add(r0, r0, Operand(2 * kPointerSize));
  __ sub(r0, r0, Operand(kPointerSize));

  EnterArgumentsAdaptorFrame(masm);
  __ sub(r0, r0, Operand(kPointerSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ sub(r2, r2, Operand(4 * kPointerSize));

  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
  LeaveArgumentsAdaptorFrame(masm);

  __ bind(&dont_adapt_arguments);

#endif  // V8_TARGET_ARCH_ARM