30 #if defined(V8_TARGET_ARCH_ARM)
42 #define __ ACCESS_MASM(masm)
45 void Builtins::Generate_Adaptor(MacroAssembler* masm,
60 int num_extra_args = 0;
70 __ add(
r0,
r0, Operand(num_extra_args + 1));
71 __ JumpToExternalReference(ExternalReference(
id, masm->isolate()));
76 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
92 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
108 static void AllocateEmptyJSArray(MacroAssembler* masm,
109 Register array_function,
114 Label* gc_required) {
117 __ LoadInitialArrayMap(array_function, scratch2, scratch1,
false);
122 if (initial_capacity > 0) {
125 __ AllocateInNewSpace(size,
138 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
144 if (initial_capacity == 0) {
164 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
173 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
174 static const int kLoopUnfoldLimit = 4;
175 if (initial_capacity <= kLoopUnfoldLimit) {
176 for (
int i = 0; i < initial_capacity; i++) {
181 __ add(scratch2, scratch1, Operand(initial_capacity *
kPointerSize));
186 __ cmp(scratch1, scratch2);
200 static void AllocateJSArray(MacroAssembler* masm,
201 Register array_function,
204 Register elements_array_storage,
205 Register elements_array_end,
209 Label* gc_required) {
211 __ LoadInitialArrayMap(array_function, scratch2,
212 elements_array_storage, fill_with_hole);
214 if (FLAG_debug_code) {
215 __ tst(array_size, array_size);
216 __ Assert(
ne,
"array size is unexpectedly 0");
222 __ mov(elements_array_end,
224 __ add(elements_array_end,
227 __ AllocateInNewSpace(
241 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
242 __ str(elements_array_storage,
252 __ str(elements_array_storage,
257 __ sub(elements_array_storage,
258 elements_array_storage,
265 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
278 __ add(elements_array_end,
279 elements_array_storage,
286 if (fill_with_hole) {
288 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
294 __ cmp(elements_array_storage, elements_array_end);
313 static void ArrayNativeCode(MacroAssembler* masm,
314 Label* call_generic_code) {
315 Counters* counters = masm->isolate()->counters();
316 Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
317 has_non_smi_element, finish, cant_transition_map, not_double;
321 __ b(
ne, &argc_one_or_more);
324 __ bind(&empty_array);
325 AllocateEmptyJSArray(masm,
332 __ IncrementCounter(counters->array_function_native(), 1,
r3,
r4);
340 __ bind(&argc_one_or_more);
341 __ cmp(
r0, Operand(1));
342 __ b(
ne, &argc_two_or_more);
346 __ b(
ne, &not_empty_array);
348 __ mov(
r0, Operand(0));
351 __ bind(&not_empty_array);
353 __ b(
ne, call_generic_code);
359 __ b(
ge, call_generic_code);
365 AllocateJSArray(masm,
375 __ IncrementCounter(counters->array_function_native(), 1,
r2,
r4);
382 __ bind(&argc_two_or_more);
389 AllocateJSArray(masm,
399 __ IncrementCounter(counters->array_function_native(), 1,
r2,
r6);
415 if (FLAG_smi_only_arrays) {
416 __ JumpIfNotSmi(
r2, &has_non_smi_element);
435 __ bind(&has_non_smi_element);
439 __ bind(&cant_transition_map);
440 __ UndoAllocationInNewSpace(
r3,
r4);
441 __ b(call_generic_code);
443 __ bind(&not_double);
451 &cant_transition_map);
453 __ RecordWriteField(
r3,
472 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
478 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
481 GenerateLoadInternalArrayFunction(masm,
r1);
483 if (FLAG_debug_code) {
487 __ Assert(
ne,
"Unexpected initial map for InternalArray function");
489 __ Assert(
eq,
"Unexpected initial map for InternalArray function");
494 ArrayNativeCode(masm, &generic_array_code);
498 __ bind(&generic_array_code);
500 Handle<Code> array_code =
501 masm->isolate()->builtins()->InternalArrayCodeGeneric();
502 __ Jump(array_code, RelocInfo::CODE_TARGET);
506 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
512 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
515 GenerateLoadArrayFunction(masm,
r1);
517 if (FLAG_debug_code) {
521 __ Assert(
ne,
"Unexpected initial map for Array function");
523 __ Assert(
eq,
"Unexpected initial map for Array function");
527 ArrayNativeCode(masm, &generic_array_code);
531 __ bind(&generic_array_code);
533 Handle<Code> array_code =
534 masm->isolate()->builtins()->ArrayCodeGeneric();
535 __ Jump(array_code, RelocInfo::CODE_TARGET);
539 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
546 Label generic_constructor;
548 if (FLAG_debug_code) {
554 __ Assert(
ne,
"Unexpected initial map for Array function");
556 __ Assert(
eq,
"Unexpected initial map for Array function");
560 ArrayNativeCode(masm, &generic_constructor);
564 __ bind(&generic_constructor);
565 Handle<Code> generic_construct_stub =
566 masm->isolate()->builtins()->JSConstructStubGeneric();
567 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
571 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
579 Counters* counters = masm->isolate()->counters();
580 __ IncrementCounter(counters->string_ctor_calls(), 1,
r2,
r3);
582 Register
function =
r1;
583 if (FLAG_debug_code) {
585 __ cmp(
function, Operand(
r2));
586 __ Assert(
eq,
"Unexpected String function");
592 __ b(
eq, &no_arguments);
594 __ sub(
r0,
r0, Operand(1));
599 Register argument =
r2;
600 Label not_cached, argument_is_string;
610 __ IncrementCounter(counters->string_ctor_cached_number(), 1,
r3,
r4);
611 __ bind(&argument_is_string);
629 __ LoadGlobalFunctionInitialMap(
function, map,
r4);
630 if (FLAG_debug_code) {
633 __ Assert(
eq,
"Unexpected string wrapper instance size");
636 __ Assert(
eq,
"Unexpected unused properties of string wrapper");
640 __ LoadRoot(
r3, Heap::kEmptyFixedArrayRootIndex);
653 Label convert_argument;
654 __ bind(&not_cached);
655 __ JumpIfSmi(
r0, &convert_argument);
662 __ b(
ne, &convert_argument);
663 __ mov(argument,
r0);
664 __ IncrementCounter(counters->string_ctor_conversions(), 1,
r3,
r4);
665 __ b(&argument_is_string);
668 __ bind(&convert_argument);
670 __ IncrementCounter(counters->string_ctor_conversions(), 1,
r3,
r4);
677 __ mov(argument,
r0);
678 __ b(&argument_is_string);
682 __ bind(&no_arguments);
683 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
685 __ b(&argument_is_string);
689 __ bind(&gc_required);
690 __ IncrementCounter(counters->string_ctor_gc_required(), 1,
r3,
r4);
694 __ CallRuntime(Runtime::kNewStringWrapper, 1);
700 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
701 bool is_api_function,
702 bool count_constructions) {
711 ASSERT(!is_api_function || !count_constructions);
713 Isolate* isolate = masm->isolate();
717 FrameScope scope(masm, StackFrame::CONSTRUCT);
726 Label rt_call, allocated;
727 if (FLAG_inline_new) {
728 Label undo_allocation;
729 #ifdef ENABLE_DEBUGGER_SUPPORT
730 ExternalReference debug_step_in_fp =
731 ExternalReference::debug_step_in_fp_address(isolate);
732 __ mov(
r2, Operand(debug_step_in_fp));
741 __ JumpIfSmi(
r2, &rt_call);
753 if (count_constructions) {
759 __ ldrb(
r4, constructor_count);
761 __ strb(
r4, constructor_count);
768 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
788 __ LoadRoot(
r6, Heap::kEmptyFixedArrayRootIndex);
805 __ LoadRoot(
r7, Heap::kUndefinedValueRootIndex);
806 if (count_constructions) {
812 if (FLAG_debug_code) {
814 __ Assert(
le,
"Unexpected number of pre-allocated property fields.");
816 __ InitializeFieldsWithFiller(
r5,
r0,
r7);
818 __ LoadRoot(
r7, Heap::kOnePointerFillerMapRootIndex);
820 __ InitializeFieldsWithFiller(
r5,
r6,
r7);
845 __ b(
eq, &allocated);
846 __ Assert(
pl,
"Property allocation count failed.");
855 __ AllocateInNewSpace(
868 __ LoadRoot(
r6, Heap::kFixedArrayMapRootIndex);
885 if (count_constructions) {
886 __ LoadRoot(
r7, Heap::kUndefinedValueRootIndex);
887 }
else if (FLAG_debug_code) {
888 __ LoadRoot(
r8, Heap::kUndefinedValueRootIndex);
890 __ Assert(
eq,
"Undefined value not loaded.");
917 __ bind(&undo_allocation);
918 __ UndoAllocationInNewSpace(
r4,
r5);
925 __ CallRuntime(Runtime::kNewObject, 1);
969 if (is_api_function) {
972 masm->isolate()->builtins()->HandleApiCallConstruct();
973 ParameterCount expected(0);
974 __ InvokeCode(code, expected, expected,
977 ParameterCount actual(
r0);
983 if (!is_api_function && !count_constructions) {
984 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
997 Label use_receiver, exit;
1004 __ JumpIfSmi(
r0, &use_receiver);
1013 __ bind(&use_receiver);
1030 __ IncrementCounter(isolate->counters()->constructed_objects(), 1,
r1,
r2);
1035 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
1036 Generate_JSConstructStubHelper(masm,
false,
true);
1040 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
1041 Generate_JSConstructStubHelper(masm,
false,
false);
1045 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
1046 Generate_JSConstructStubHelper(masm,
true,
false);
1050 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1051 bool is_construct) {
1070 __ InitializeRootRegister();
1094 __ LoadRoot(
r4, Heap::kUndefinedValueRootIndex);
1108 ParameterCount actual(
r0);
1122 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1123 Generate_JSEntryTrampolineHelper(masm,
false);
1127 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1128 Generate_JSEntryTrampolineHelper(masm,
true);
1132 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
1144 __ CallRuntime(Runtime::kLazyCompile, 1);
1161 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
1173 __ CallRuntime(Runtime::kLazyRecompile, 1);
1190 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1197 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1204 Label with_tos_register, unknown_state;
1206 __ b(
ne, &with_tos_register);
1210 __ bind(&with_tos_register);
1213 __ b(
ne, &unknown_state);
1217 __ bind(&unknown_state);
1218 __ stop(
"no cases left");
1222 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1227 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1232 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
1240 __ CallRuntime(Runtime::kNotifyOSR, 0);
1247 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1248 CpuFeatures::TryForceFeatureScope scope(
VFP3);
1250 __ Abort(
"Unreachable code: Cannot optimize without VFP3 support.");
1260 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1278 generator.Generate();
1282 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1286 __ cmp(
r0, Operand(0));
1288 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
1290 __ add(
r0,
r0, Operand(1));
1297 Label slow, non_function;
1299 __ JumpIfSmi(
r1, &non_function);
1306 Label shift_arguments;
1308 { Label convert_to_object, use_global_receiver, patch_receiver;
1317 __ b(
ne, &shift_arguments);
1321 __ b(
ne, &shift_arguments);
1329 __ JumpIfSmi(
r2, &convert_to_object);
1331 __ LoadRoot(
r3, Heap::kUndefinedValueRootIndex);
1333 __ b(
eq, &use_global_receiver);
1334 __ LoadRoot(
r3, Heap::kNullValueRootIndex);
1336 __ b(
eq, &use_global_receiver);
1340 __ b(
ge, &shift_arguments);
1342 __ bind(&convert_to_object);
1363 __ jmp(&patch_receiver);
1367 __ bind(&use_global_receiver);
1368 const int kGlobalIndex =
1375 __ bind(&patch_receiver);
1379 __ jmp(&shift_arguments);
1386 __ b(
eq, &shift_arguments);
1387 __ bind(&non_function);
1406 __ bind(&shift_arguments);
1414 __ sub(
r2,
r2, Operand(kPointerSize));
1419 __ sub(
r0,
r0, Operand(1));
1428 { Label
function, non_proxy;
1430 __ b(
eq, &
function);
1434 __ cmp(
r4, Operand(1));
1435 __ b(
ne, &non_proxy);
1438 __ add(
r0,
r0, Operand(1));
1439 __ GetBuiltinEntry(
r3, Builtins::CALL_FUNCTION_PROXY);
1440 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1441 RelocInfo::CODE_TARGET);
1443 __ bind(&non_proxy);
1444 __ GetBuiltinEntry(
r3, Builtins::CALL_NON_FUNCTION);
1445 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1446 RelocInfo::CODE_TARGET);
1462 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1463 RelocInfo::CODE_TARGET,
1466 ParameterCount expected(0);
1472 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1492 __ LoadRoot(
r2, Heap::kRealStackLimitRootIndex);
1517 Label push_receiver;
1520 __ b(
ne, &push_receiver);
1529 Label call_to_object, use_global_receiver;
1533 __ b(
ne, &push_receiver);
1537 __ b(
ne, &push_receiver);
1540 __ JumpIfSmi(
r0, &call_to_object);
1541 __ LoadRoot(
r1, Heap::kNullValueRootIndex);
1543 __ b(
eq, &use_global_receiver);
1544 __ LoadRoot(
r1, Heap::kUndefinedValueRootIndex);
1546 __ b(
eq, &use_global_receiver);
1552 __ b(
ge, &push_receiver);
1556 __ bind(&call_to_object);
1559 __ b(&push_receiver);
1562 __ bind(&use_global_receiver);
1563 const int kGlobalOffset =
1572 __ bind(&push_receiver);
1589 __ CallRuntime(Runtime::kGetProperty, 2);
1606 ParameterCount actual(
r0);
1610 __ b(
ne, &call_proxy);
1614 frame_scope.GenerateLeaveFrame();
1615 __ add(
sp,
sp, Operand(3 * kPointerSize));
1619 __ bind(&call_proxy);
1621 __ add(
r0,
r0, Operand(1));
1624 __ GetBuiltinEntry(
r3, Builtins::CALL_FUNCTION_PROXY);
1625 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1626 RelocInfo::CODE_TARGET);
1630 __ add(
sp,
sp, Operand(3 * kPointerSize));
1635 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1639 __ add(
fp,
sp, Operand(3 * kPointerSize));
1643 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1653 __ add(
sp,
sp, Operand(kPointerSize));
1657 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1666 Label invoke, dont_adapt_arguments;
1668 Label enough, too_few;
1672 __ b(
eq, &dont_adapt_arguments);
1676 EnterArgumentsAdaptorFrame(masm);
1685 __ add(
r0,
r0, Operand(2 * kPointerSize));
1699 __ sub(
r0,
r0, Operand(kPointerSize));
1707 EnterArgumentsAdaptorFrame(masm);
1727 __ sub(
r0,
r0, Operand(kPointerSize));
1734 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
1736 __ sub(
r2,
r2, Operand(4 * kPointerSize));
1750 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1753 LeaveArgumentsAdaptorFrame(masm);
1760 __ bind(&dont_adapt_arguments);
1769 #endif // V8_TARGET_ARCH_ARM
const intptr_t kSmiTagMask
static const int kCodeEntryOffset
static const int kPrototypeOrInitialMapOffset
static int SlotOffset(int index)
static Smi * FromInt(int value)
const intptr_t kIntptrSignBit
static const int kGlobalReceiverOffset
static const int kConstructionCountOffset
static bool IsSupported(CpuFeature f)
static const int kCallerSPOffset
#define ASSERT(condition)
const RegList kJSCallerSaved
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static const int kUnusedPropertyFieldsOffset
static const int kInstanceSizesOffset
static const int kGlobalContextOffset
static const int kContextOffset
static const int kInObjectPropertiesByte
const uint32_t kNotStringTag
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kDontAdaptArgumentsSentinel
const RegList kCalleeSaved
static const int kPropertiesOffset
static const int kElementsOffset
static const int kLengthOffset
static int SizeFor(int length)
static const int kHeaderSize
static const int kMapOffset
const uint32_t kIsNotStringMask
static const int kLengthOffset
MemOperand FieldMemOperand(Register object, int offset)
static const int kContextOffset
static const int kFunctionOffset
static const int kFormalParameterCountOffset
static const int kHeaderSize
#define ASSERT_EQ(v1, v2)
static const int kHeaderSize
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kPreAllocatedPropertyFieldsByte
static const int kPreallocatedArrayElements
static const int kValueOffset
static const int kCompilerHintsOffset
static const int kSharedFunctionInfoOffset
static const int kInitialMaxFastElementArray
static const int kInstanceTypeOffset