32 #if defined(V8_TARGET_ARCH_MIPS)
44 #define __ ACCESS_MASM(masm)
// Adaptor from JS calling convention into a C++ builtin.
// NOTE(review): this extract is fragmentary -- interior lines are missing.
47 void Builtins::Generate_Adaptor(MacroAssembler* masm,
62 int num_extra_args = 0;
// s0 = a0 (argc) + receiver + any extra args counted above.
72 __ Addu(
s0, a0, num_extra_args + 1);
// Tail-call the external (C++) builtin entry identified by 'id'.
75 __ JumpToExternalReference(ExternalReference(
id, masm->isolate()));
// Presumably loads the context's InternalArray function into a result
// register -- body not visible in this extract; verify against full source.
80 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
// Presumably loads the global Array function into 'result' -- body not
// visible in this extract; verify against full source.
96 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Allocates an empty JSArray in new space; jumps to 'gc_required' when
// allocation cannot proceed. NOTE(review): fragmentary extract -- interior
// lines (register parameters, stores) are missing.
112 static void AllocateEmptyJSArray(MacroAssembler* masm,
113 Register array_function,
118 Label* gc_required) {
// Fetch the initial array map from the constructor function.
121 __ LoadInitialArrayMap(array_function, scratch2, scratch1,
false);
// A non-zero initial capacity means a real backing store is allocated.
126 if (initial_capacity > 0) {
129 __ AllocateInNewSpace(size,
141 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
// Zero scratch3 (machine zero is also smi 0).
144 __ mov(scratch3, zero_reg);
147 if (initial_capacity == 0) {
167 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
// Pre-fill the elements with the hole value; small capacities use an
// unrolled sequence, larger ones a loop bounded by scratch2.
178 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
179 static const int kLoopUnfoldLimit = 4;
180 if (initial_capacity <= kLoopUnfoldLimit) {
181 for (
int i = 0; i < initial_capacity; i++) {
186 __ Addu(scratch2, scratch1, Operand(initial_capacity *
kPointerSize));
// Loop until the fill cursor (scratch1) reaches the end (scratch2).
192 __ Branch(&loop,
lt, scratch1, Operand(scratch2));
// Allocates a JSArray with a runtime-determined element count; jumps to
// 'gc_required' on allocation failure. Optionally fills the elements with
// the hole. NOTE(review): fragmentary extract -- interior lines are missing.
206 static void AllocateJSArray(MacroAssembler* masm,
207 Register array_function,
210 Register elements_array_storage,
211 Register elements_array_end,
215 Label* gc_required) {
// Initial map depends on whether we will fill with the hole (holey kind).
217 __ LoadInitialArrayMap(array_function, scratch2,
218 elements_array_storage, fill_with_hole);
// Debug builds assert the requested size is non-zero.
220 if (FLAG_debug_code) {
222 ne,
"array size is unexpectedly 0", array_size, Operand(zero_reg));
228 __ li(elements_array_end,
231 __ Addu(elements_array_end, elements_array_end, scratch1);
232 __ AllocateInNewSpace(
246 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
247 __ sw(elements_array_storage,
257 __ sw(elements_array_storage,
// Mask the storage pointer (exact mask operand not visible here).
261 __ And(elements_array_storage,
262 elements_array_storage,
269 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
272 __ Addu(elements_array_storage, elements_array_storage,
kPointerSize);
281 __ Addu(elements_array_storage, elements_array_storage,
kPointerSize);
// Compute the end pointer of the elements area.
289 __ Addu(elements_array_end, elements_array_storage, elements_array_end);
// Optionally fill the freshly allocated elements with the hole value.
295 if (fill_with_hole) {
297 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
301 __ Addu(elements_array_storage, elements_array_storage,
kPointerSize);
304 __ Branch(&loop,
lt, elements_array_storage, Operand(elements_array_end));
// ArrayNativeCode: inline fast paths for the Array constructor, dispatching
// on argc in a0 (zero args / one arg / two or more); falls back to
// 'call_generic_code' when the fast path cannot handle the input.
// NOTE(review): fragmentary extract -- interior lines are missing.
// FIX: '¬_empty_array' / '¬_double' were HTML-entity mojibake
// ('&not' rendered as U+00AC) of '&not_empty_array' / '&not_double' --
// both labels are declared in the Label list below; references restored.
323 static void ArrayNativeCode(MacroAssembler* masm,
324 Label* call_generic_code) {
325 Counters* counters = masm->isolate()->counters();
326 Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
327 has_non_smi_element, finish, cant_transition_map, not_double;
// argc == 0: build an empty array.
330 __ Branch(&argc_one_or_more,
ne, a0, Operand(zero_reg));
332 __ bind(&empty_array);
333 AllocateEmptyJSArray(masm,
340 __ IncrementCounter(counters->array_function_native(), 1, a3, t0);
// argc == 1: the argument is the requested length.
348 __ bind(&argc_one_or_more);
349 __ Branch(&argc_two_or_more,
ne, a0, Operand(1));
// A length of 0 is handled by the empty-array path above.
353 __ Branch(&not_empty_array,
ne, a2, Operand(zero_reg));
355 __ mov(a0, zero_reg);
356 __ Branch(&empty_array);
358 __ bind(&not_empty_array);
360 __ Branch(call_generic_code,
eq, a3, Operand(zero_reg));
372 AllocateJSArray(masm,
382 __ IncrementCounter(counters->array_function_native(), 1, a2, t0);
// argc >= 2: allocate and fill from the argument list.
390 __ bind(&argc_two_or_more);
397 AllocateJSArray(masm,
407 __ IncrementCounter(counters->array_function_native(), 1, a2, t2);
// With smi-only arrays, a non-smi element forces an elements-kind check.
423 if (FLAG_smi_only_arrays) {
424 __ JumpIfNotSmi(a2, &has_non_smi_element);
430 __ Branch(&loop,
lt, t0, Operand(t1));
444 __ bind(&has_non_smi_element);
// Cannot transition the map: undo the allocation and go generic.
448 __ bind(&cant_transition_map);
449 __ UndoAllocationInNewSpace(a3, t0);
450 __ Branch(call_generic_code);
452 __ bind(&not_double);
460 &cant_transition_map);
462 __ RecordWriteField(a3,
476 __ Branch(&loop2,
lt, t0, Operand(t1));
// Builtin entry for the InternalArray constructor: asserts the initial map
// in debug builds, tries the native fast path, and otherwise jumps to the
// generic stub. NOTE(review): fragmentary extract -- lines are missing.
481 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
487 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
490 GenerateLoadInternalArrayFunction(masm, a1);
// Debug-only sanity checks on the function's initial map.
492 if (FLAG_debug_code) {
496 __ Assert(
ne,
"Unexpected initial map for InternalArray function",
497 t0, Operand(zero_reg));
498 __ GetObjectType(a2, a3, t0);
499 __ Assert(
eq,
"Unexpected initial map for InternalArray function",
// Fast path; falls through to the generic stub on failure.
505 ArrayNativeCode(masm, &generic_array_code);
509 __ bind(&generic_array_code);
511 Handle<Code> array_code =
512 masm->isolate()->builtins()->InternalArrayCodeGeneric();
513 __ Jump(array_code, RelocInfo::CODE_TARGET);
// Builtin entry for the Array function called as a plain function: debug
// map checks, native fast path, generic fallback.
// NOTE(review): fragmentary extract -- lines are missing.
517 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
523 Label generic_array_code;
526 GenerateLoadArrayFunction(masm, a1);
// Debug-only sanity checks on the function's initial map.
528 if (FLAG_debug_code) {
532 __ Assert(
ne,
"Unexpected initial map for Array function (1)",
533 t0, Operand(zero_reg));
534 __ GetObjectType(a2, a3, t0);
535 __ Assert(
eq,
"Unexpected initial map for Array function (2)",
540 ArrayNativeCode(masm, &generic_array_code);
// Fast path failed: delegate to the generic Array code stub.
544 __ bind(&generic_array_code);
546 Handle<Code> array_code =
547 masm->isolate()->builtins()->ArrayCodeGeneric();
548 __ Jump(array_code, RelocInfo::CODE_TARGET);
// Builtin entry for the Array function called as a constructor ('new
// Array(...)'): debug map checks, native fast path, generic construct stub
// fallback. NOTE(review): fragmentary extract -- lines are missing.
552 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
559 Label generic_constructor;
561 if (FLAG_debug_code) {
567 __ Assert(
ne,
"Unexpected initial map for Array function (3)",
568 t0, Operand(zero_reg));
569 __ GetObjectType(a2, a3, t0);
570 __ Assert(
eq,
"Unexpected initial map for Array function (4)",
575 ArrayNativeCode(masm, &generic_constructor);
// Fast path failed: delegate to the generic JS construct stub.
579 __ bind(&generic_constructor);
581 Handle<Code> generic_construct_stub =
582 masm->isolate()->builtins()->JSConstructStubGeneric();
583 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
// Builtin entry for 'new String(...)': fast paths for string / cached
// number arguments, a conversion slow path, a no-argument path (empty
// string), and a GC-required runtime fallback.
// NOTE(review): fragmentary extract -- interior lines are missing.
// FIX: '¬_cached' was HTML-entity mojibake ('&not' rendered as U+00AC)
// of '&not_cached' -- the label is declared below; reference restored.
587 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
595 Counters* counters = masm->isolate()->counters();
596 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
598 Register
function = a1;
// Debug-only check that a1 really holds the String function.
599 if (FLAG_debug_code) {
601 __ Assert(
eq,
"Unexpected String function",
function, Operand(a2));
// No arguments: use the empty string.
606 __ Branch(&no_arguments,
eq, a0, Operand(zero_reg));
608 __ Subu(a0, a0, Operand(1));
615 Register argument = a2;
616 Label not_cached, argument_is_string;
626 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
627 __ bind(&argument_is_string);
645 __ LoadGlobalFunctionInitialMap(
function, map, t0);
// Debug-only checks on the string wrapper's map layout.
646 if (FLAG_debug_code) {
648 __ Assert(
eq,
"Unexpected string wrapper instance size",
651 __ Assert(
eq,
"Unexpected unused properties of string wrapper",
652 t0, Operand(zero_reg));
656 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
// Argument was not found in the number-string cache.
669 Label convert_argument;
670 __ bind(&not_cached);
671 __ JumpIfSmi(a0, &convert_argument);
678 __ Branch(&convert_argument,
ne, t0, Operand(zero_reg));
679 __ mov(argument, a0);
680 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
681 __ Branch(&argument_is_string);
// Slow path: convert the argument to a string via a call (result in v0).
684 __ bind(&convert_argument);
686 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
693 __ mov(argument, v0);
694 __ Branch(&argument_is_string);
// No-argument path: wrap the empty string.
698 __ bind(&no_arguments);
699 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
701 __ Branch(&argument_is_string);
// Allocation failed: let the runtime build the wrapper.
705 __ bind(&gc_required);
706 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
710 __ CallRuntime(Runtime::kNewStringWrapper, 1);
// Shared body of the JS construct stubs. 'is_api_function' selects the API
// call path; 'count_constructions' enables the inobject-slack-tracking
// countdown. The two flags are mutually exclusive (asserted below).
// NOTE(review): fragmentary extract -- many interior lines are missing.
716 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
717 bool is_api_function,
718 bool count_constructions) {
727 ASSERT(!is_api_function || !count_constructions);
729 Isolate* isolate = masm->isolate();
// Enter an internal CONSTRUCT frame for the duration of object setup.
740 FrameScope scope(masm, StackFrame::CONSTRUCT);
744 __ MultiPushReversed(a0.bit() | a1.bit());
747 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
749 Label rt_call, allocated;
// Try to allocate the object inline; fall back to the runtime otherwise.
752 if (FLAG_inline_new) {
753 Label undo_allocation;
754 #ifdef ENABLE_DEBUGGER_SUPPORT
// When the debugger is stepping into constructors, skip inline allocation.
755 ExternalReference debug_step_in_fp =
756 ExternalReference::debug_step_in_fp_address(isolate);
757 __ li(a2, Operand(debug_step_in_fp));
759 __ Branch(&rt_call,
ne, a2, Operand(zero_reg));
765 __ JumpIfSmi(a2, &rt_call);
766 __ GetObjectType(a2, a3, t4);
// Slack tracking: decrement the construction counter; when it hits zero,
// ask the runtime to finalize the instance size.
777 if (count_constructions) {
783 __ lbu(t0, constructor_count);
784 __ Subu(t0, t0, Operand(1));
785 __ sb(t0, constructor_count);
786 __ Branch(&allocate,
ne, t0, Operand(zero_reg));
792 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
812 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
// Initialize the object's fields with undefined (plus a filler boundary
// when slack tracking is active).
831 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
832 if (count_constructions) {
839 if (FLAG_debug_code) {
840 __ Assert(
le,
"Unexpected number of pre-allocated property fields.",
843 __ InitializeFieldsWithFiller(t5, a0, t7);
845 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
847 __ InitializeFieldsWithFiller(t5, t6, t7);
866 __ Addu(a3, a3, Operand(t6));
872 __ Branch(&allocated,
eq, a3, Operand(zero_reg));
874 a3, Operand(zero_reg));
// Allocate the properties backing store (a FixedArray).
883 __ AllocateInNewSpace(
896 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
916 if (count_constructions) {
917 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
918 }
else if (FLAG_debug_code) {
919 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
920 __ Assert(
eq,
"Undefined value not loaded.", t7, Operand(t8));
927 __ Branch(&loop,
less, a2, Operand(t6));
// Something went wrong after allocation: undo it and use the runtime.
947 __ bind(&undo_allocation);
948 __ UndoAllocationInNewSpace(t4, t5);
955 __ CallRuntime(Runtime::kNewObject, 1);
// Copy constructor arguments and invoke the constructor function.
991 __ Addu(t0, a2, Operand(t0));
995 __ Addu(a3, a3, Operand(-2));
1001 if (is_api_function) {
1004 masm->isolate()->builtins()->HandleApiCallConstruct();
1005 ParameterCount expected(0);
1006 __ InvokeCode(code, expected, expected,
1009 ParameterCount actual(a0);
// Record the deopt PC for the plain (non-API, non-counting) stub.
1015 if (!is_api_function && !count_constructions) {
1016 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
// If the constructor returned a non-object, use the receiver instead.
1025 Label use_receiver, exit;
1032 __ JumpIfSmi(v0, &use_receiver);
1036 __ GetObjectType(v0, a3, a3);
1041 __ bind(&use_receiver);
1059 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
// Countdown variant: is_api_function = false, count_constructions = true.
1064 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
1065 Generate_JSConstructStubHelper(masm,
false,
true);
// Generic variant: is_api_function = false, count_constructions = false.
1069 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
1070 Generate_JSConstructStubHelper(masm,
false,
false);
// API variant: is_api_function = true, count_constructions = false.
1074 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
1075 Generate_JSConstructStubHelper(masm,
true,
false);
// Shared body of the JS entry trampolines: sets up the context, copies the
// arguments onto the stack, then invokes the function ('is_construct'
// selects call vs. construct). NOTE(review): fragmentary extract.
1079 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1080 bool is_construct) {
// Clear the context register before entering JS.
1092 __ mov(
cp, zero_reg);
// t2 = end of the argument array; copy loop runs until s0 reaches it.
1109 __ addu(t2,
s0, t0);
1119 __ Branch(&loop,
ne,
s0, Operand(t2));
1123 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1138 ParameterCount actual(a0);
// Plain-call entry: is_construct = false.
1150 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1151 Generate_JSEntryTrampolineHelper(masm,
false);
// Construct entry: is_construct = true.
1155 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1156 Generate_JSEntryTrampolineHelper(masm,
true);
// Calls the runtime to compile the function lazily on first invocation.
// NOTE(review): fragmentary extract -- surrounding lines are missing.
1160 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
1173 __ CallRuntime(Runtime::kLazyCompile, 1);
// Calls the runtime to recompile (optimize) the function.
// NOTE(review): fragmentary extract -- surrounding lines are missing.
1190 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
1202 __ CallRuntime(Runtime::kLazyRecompile, 1);
// Notifies the runtime that deoptimization happened, then restores state
// according to the saved bailout state. NOTE(review): fragmentary extract.
1219 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1226 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
// Dispatch on the saved state: no TOS register / TOS register / unknown.
1233 Label with_tos_register, unknown_state;
1234 __ Branch(&with_tos_register,
1239 __ bind(&with_tos_register);
// Unknown bailout state is a fatal error.
1246 __ bind(&unknown_state);
1247 __ stop(
"no cases left");
// Eager-deopt wrapper around Generate_NotifyDeoptimizedHelper (body not
// visible in this extract).
1251 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
// Lazy-deopt wrapper around Generate_NotifyDeoptimizedHelper (body not
// visible in this extract).
1256 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
// Notifies the runtime of on-stack replacement, preserving registers
// around the call. NOTE(review): fragmentary extract.
1261 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
1268 __ MultiPush(saved_regs);
1271 __ CallRuntime(Runtime::kNotifyOSR, 0);
1273 __ MultiPop(saved_regs);
// On-stack replacement entry: aborts without FPU support, otherwise asks
// the runtime to compile the optimized replacement code.
// NOTE(review): 'VFP3' is an ARM feature name -- in a MIPS file this is
// presumably meant to be the FPU feature; verify against the full source.
1278 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1279 CpuFeatures::TryForceFeatureScope scope(
VFP3);
1281 __ Abort(
"Unreachable code: Cannot optimize without FPU support.");
1291 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1305 generator.Generate();
// Implements Function.prototype.call semantics: fixes up the receiver,
// shifts arguments down by one, and dispatches to the callee (function,
// proxy, or non-function). NOTE(review): fragmentary extract -- many
// interior lines are missing.
1309 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// If called with zero arguments, push undefined as the receiver.
1313 __ Branch(&done,
ne, a0, Operand(zero_reg));
1314 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1316 __ Addu(a0, a0, Operand(1));
1323 Label slow, non_function;
1325 __ addu(at,
sp, at);
// The callee must be a JSFunction; otherwise take the slow paths below.
1327 __ JumpIfSmi(a1, &non_function);
1328 __ GetObjectType(a1, a2, a2);
1334 Label shift_arguments;
// Receiver fix-up for sloppy-mode callees.
1336 { Label convert_to_object, use_global_receiver, patch_receiver;
1345 __ Branch(&shift_arguments,
ne, t3, Operand(zero_reg));
1349 __ Branch(&shift_arguments,
ne, t3, Operand(zero_reg));
1354 __ addu(a2,
sp, at);
// Smi receivers must be boxed; undefined/null map to the global receiver.
1359 __ JumpIfSmi(a2, &convert_to_object, t2);
1361 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1362 __ Branch(&use_global_receiver,
eq, a2, Operand(a3));
1363 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1364 __ Branch(&use_global_receiver,
eq, a2, Operand(a3));
1367 __ GetObjectType(a2, a3, a3);
1370 __ bind(&convert_to_object);
1387 __ addu(at,
sp, at);
1390 __ Branch(&patch_receiver);
1394 __ bind(&use_global_receiver);
1395 const int kGlobalIndex =
// Store the fixed-up receiver back into its stack slot.
1402 __ bind(&patch_receiver);
1404 __ addu(a3,
sp, at);
1407 __ Branch(&shift_arguments);
1415 __ bind(&non_function);
1426 __ addu(a2,
sp, at);
// Shift all arguments down one slot (removing the function slot).
1435 __ bind(&shift_arguments);
1439 __ addu(a2,
sp, at);
1444 __ Subu(a2, a2, Operand(kPointerSize));
1445 __ Branch(&loop,
ne, a2, Operand(
sp));
1448 __ Subu(a0, a0, Operand(1));
// Dispatch: t0 == 0 -> plain function, 1 -> function proxy, else
// non-function; proxies/non-functions go through the CALL_* builtins.
1457 { Label
function, non_proxy;
1458 __ Branch(&
function,
eq, t0, Operand(zero_reg));
1460 __ mov(a2, zero_reg);
1462 __ Branch(&non_proxy,
ne, t0, Operand(1));
1465 __ Addu(a0, a0, Operand(1));
1466 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
1467 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1468 RelocInfo::CODE_TARGET);
1470 __ bind(&non_proxy);
1471 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
1472 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1473 RelocInfo::CODE_TARGET);
// Argument-count mismatch goes through the arguments adaptor.
1489 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1490 RelocInfo::CODE_TARGET,
ne, a2, Operand(a0));
1492 ParameterCount expected(0);
// Implements Function.prototype.apply: checks stack space, fixes up the
// receiver, pushes arguments one by one from the arguments object, then
// invokes the function (or the proxy path).
// NOTE(review): fragmentary extract -- many interior lines are missing.
1498 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Ensure there is enough stack space for the spread arguments.
1518 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1521 __ subu(a2,
sp, a2);
1524 __ Branch(&okay,
gt, a2, Operand(t3));
1536 __ mov(a1, zero_reg);
1543 Label push_receiver;
1545 __ GetObjectType(a1, a2, a2);
// Receiver fix-up, mirroring Generate_FunctionCall.
1555 Label call_to_object, use_global_receiver;
1559 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1563 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1566 __ JumpIfSmi(a0, &call_to_object);
1567 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1568 __ Branch(&use_global_receiver,
eq, a0, Operand(a1));
1569 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1570 __ Branch(&use_global_receiver,
eq, a0, Operand(a2));
1575 __ GetObjectType(a0, a1, a1);
1580 __ bind(&call_to_object);
1584 __ Branch(&push_receiver);
1587 __ bind(&use_global_receiver);
1588 const int kGlobalOffset =
1597 __ bind(&push_receiver);
// Fetch each argument from the arguments object via the runtime.
1614 __ CallRuntime(Runtime::kGetProperty, 2);
1626 __ Branch(&loop, ne, a0, Operand(a1));
1630 ParameterCount actual(a0);
1633 __ GetObjectType(a1, a2, a2);
1639 frame_scope.GenerateLeaveFrame();
// Drop the three apply frame slots before returning.
1641 __ Addu(
sp,
sp, Operand(3 * kPointerSize));
// Proxy callee path.
1644 __ bind(&call_proxy);
1646 __ Addu(a0, a0, Operand(1));
1649 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
1650 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1651 RelocInfo::CODE_TARGET);
1656 __ Addu(
sp,
sp, Operand(3 * kPointerSize));
// Builds an arguments-adaptor frame: saves a0/a1/t0/fp/ra and points fp at
// the new frame. NOTE(review): fragmentary extract.
1660 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1663 __ MultiPush(a0.bit() | a1.bit() | t0.bit() |
fp.
bit() | ra.bit());
// fp = sp + 3 slots, placing fp inside the pushed frame.
1664 __ Addu(
fp,
sp, Operand(3 * kPointerSize));
// Tears down an arguments-adaptor frame: restores fp/ra and pops the
// remaining slot. NOTE(review): fragmentary extract -- the intervening
// stack adjustment lines are missing.
1668 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1676 __ MultiPop(
fp.
bit() | ra.bit());
1680 __ Addu(
sp,
sp, Operand(kPointerSize));
// Adapts the actual argument count (a0) to the expected count (a2) before
// invoking the target: copies arguments when there are enough, pads with
// undefined when too few, and skips adaptation entirely on the sentinel.
// NOTE(review): fragmentary extract -- many interior lines are missing.
1684 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1694 Label invoke, dont_adapt_arguments;
1696 Label enough, too_few;
1697 __ Branch(&dont_adapt_arguments,
eq,
// Unsigned compare: actual < expected -> too_few path.
1700 __ Branch(&too_few,
Uless, a0, Operand(a2));
// Enough arguments: copy actual args into the adaptor frame.
1708 EnterArgumentsAdaptorFrame(masm);
1712 __ Addu(a0,
fp, a0);
1714 __ Addu(a0, a0, Operand(2 * kPointerSize));
1717 __ subu(a2, a0, a2);
1730 __ addiu(a0, a0, -kPointerSize);
// Too few arguments: copy what exists, then fill with undefined.
1737 EnterArgumentsAdaptorFrame(masm);
1745 __ Addu(a0,
fp, a0);
1747 __ Addu(a0, a0, Operand(2 * kPointerSize));
1749 __ Addu(t3,
fp, kPointerSize);
1760 __ Subu(
sp,
sp, kPointerSize);
1761 __ Subu(a0, a0, kPointerSize);
// Pad the remaining expected slots with undefined.
1769 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1771 __ Subu(a2,
fp, Operand(t2));
1772 __ Addu(a2, a2, Operand(-4 * kPointerSize));
1776 __ Subu(
sp,
sp, kPointerSize);
// Record the deopt PC for the adaptor, then tear down and invoke.
1787 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1790 LeaveArgumentsAdaptorFrame(masm);
// Sentinel expected-count: call the code directly without adaptation.
1797 __ bind(&dont_adapt_arguments);
1806 #endif // V8_TARGET_ARCH_MIPS
const intptr_t kSmiTagMask
static const int kCodeEntryOffset
static const int kPrototypeOrInitialMapOffset
static int SlotOffset(int index)
static Smi * FromInt(int value)
const intptr_t kIntptrSignBit
static const int kGlobalReceiverOffset
static const int kConstructionCountOffset
static bool IsSupported(CpuFeature f)
static const int kCallerSPOffset
#define ASSERT(condition)
const RegList kJSCallerSaved
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static const int kUnusedPropertyFieldsOffset
static const int kInstanceSizesOffset
static const int kGlobalContextOffset
static const int kContextOffset
const intptr_t kHeapObjectTagMask
static const int kInObjectPropertiesByte
const uint32_t kNotStringTag
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kDontAdaptArgumentsSentinel
const RegList kCalleeSaved
static const int kPropertiesOffset
static const int kElementsOffset
static const int kLengthOffset
static int SizeFor(int length)
static const int kHeaderSize
static const int kMapOffset
const uint32_t kIsNotStringMask
static const int kLengthOffset
MemOperand FieldMemOperand(Register object, int offset)
static const int kContextOffset
static const int kFunctionOffset
static const int kFormalParameterCountOffset
static const int kHeaderSize
#define ASSERT_EQ(v1, v2)
static const int kHeaderSize
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kPreAllocatedPropertyFieldsByte
static const int kPreallocatedArrayElements
static const int kValueOffset
static const int kCompilerHintsOffset
static const int kSharedFunctionInfoOffset
static const int kInitialMaxFastElementArray
static const int kInstanceTypeOffset