#if V8_TARGET_ARCH_MIPS
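// Each InitializeInterfaceDescriptor below tells the code-stub framework
// which MIPS registers carry a stub's parameters and which runtime entry
// (if any) serves as its deoptimization handler; a NULL handler means the
// stub never bails out to the runtime.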
44 CodeStubInterfaceDescriptor* descriptor) {
45 static Register registers[] = { a2 };
46 descriptor->register_param_count_ = 1;
47 descriptor->register_params_ = registers;
48 descriptor->deoptimization_handler_ =
53 void FastNewContextStub::InitializeInterfaceDescriptor(
55 CodeStubInterfaceDescriptor* descriptor) {
56 static Register registers[] = { a1 };
57 descriptor->register_param_count_ = 1;
58 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
65 CodeStubInterfaceDescriptor* descriptor) {
66 static Register registers[] = { a0 };
67 descriptor->register_param_count_ = 1;
68 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
73 void NumberToStringStub::InitializeInterfaceDescriptor(
75 CodeStubInterfaceDescriptor* descriptor) {
76 static Register registers[] = { a0 };
77 descriptor->register_param_count_ = 1;
78 descriptor->register_params_ = registers;
79 descriptor->deoptimization_handler_ =
86 CodeStubInterfaceDescriptor* descriptor) {
87 static Register registers[] = { a3, a2, a1 };
88 descriptor->register_param_count_ = 3;
89 descriptor->register_params_ = registers;
90 descriptor->deoptimization_handler_ =
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
98 CodeStubInterfaceDescriptor* descriptor) {
99 static Register registers[] = { a3, a2, a1, a0 };
100 descriptor->register_param_count_ = 4;
101 descriptor->register_params_ = registers;
102 descriptor->deoptimization_handler_ =
109 CodeStubInterfaceDescriptor* descriptor) {
110 static Register registers[] = { a2, a3 };
111 descriptor->register_param_count_ = 2;
112 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
119 CodeStubInterfaceDescriptor* descriptor) {
120 static Register registers[] = { a1, a0 };
121 descriptor->register_param_count_ = 2;
122 descriptor->register_params_ = registers;
123 descriptor->deoptimization_handler_ =
130 CodeStubInterfaceDescriptor* descriptor) {
131 static Register registers[] = {a1, a0 };
132 descriptor->register_param_count_ = 2;
133 descriptor->register_params_ = registers;
134 descriptor->deoptimization_handler_ =
139 void RegExpConstructResultStub::InitializeInterfaceDescriptor(
141 CodeStubInterfaceDescriptor* descriptor) {
142 static Register registers[] = { a2, a1, a0 };
143 descriptor->register_param_count_ = 3;
144 descriptor->register_params_ = registers;
145 descriptor->deoptimization_handler_ =
152 CodeStubInterfaceDescriptor* descriptor) {
153 static Register registers[] = { a0 };
154 descriptor->register_param_count_ = 1;
155 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
162 CodeStubInterfaceDescriptor* descriptor) {
163 static Register registers[] = { a1 };
164 descriptor->register_param_count_ = 1;
165 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
172 CodeStubInterfaceDescriptor* descriptor) {
173 static Register registers[] = { a0, a2 };
174 descriptor->register_param_count_ = 2;
175 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
182 CodeStubInterfaceDescriptor* descriptor) {
183 static Register registers[] = { a1, a0 };
184 descriptor->register_param_count_ = 2;
185 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
192 CodeStubInterfaceDescriptor* descriptor) {
193 static Register registers[] = { a2, a1, a0 };
194 descriptor->register_param_count_ = 3;
195 descriptor->register_params_ = registers;
196 descriptor->deoptimization_handler_ =
203 CodeStubInterfaceDescriptor* descriptor) {
204 static Register registers[] = { a0, a1 };
205 descriptor->register_param_count_ = 2;
206 descriptor->register_params_ = registers;
215 CodeStubInterfaceDescriptor* descriptor) {
216 static Register registers[] = { a0 };
217 descriptor->register_param_count_ = 1;
218 descriptor->register_params_ = registers;
219 descriptor->deoptimization_handler_ =
221 descriptor->SetMissHandler(
222 ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
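// Array constructor stubs take the constructor function in a1 and the
// AllocationSite feedback in a2; when the argument count is not a
// compile-time constant, a0 additionally holds the actual argument count
// and doubles as the stack parameter count register.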
226 static void InitializeArrayConstructorDescriptor(
228 CodeStubInterfaceDescriptor* descriptor,
229 int constant_stack_parameter_count) {
234 static Register registers_variable_args[] = { a1, a2, a0 };
235 static Register registers_no_args[] = { a1, a2 };
237 if (constant_stack_parameter_count == 0) {
238 descriptor->register_param_count_ = 2;
239 descriptor->register_params_ = registers_no_args;
243 descriptor->stack_parameter_count_ = a0;
244 descriptor->register_param_count_ = 3;
245 descriptor->register_params_ = registers_variable_args;
248 descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
250 descriptor->deoptimization_handler_ =
255 static void InitializeInternalArrayConstructorDescriptor(
257 CodeStubInterfaceDescriptor* descriptor,
258 int constant_stack_parameter_count) {
262 static Register registers_variable_args[] = { a1, a0 };
263 static Register registers_no_args[] = { a1 };
265 if (constant_stack_parameter_count == 0) {
266 descriptor->register_param_count_ = 1;
267 descriptor->register_params_ = registers_no_args;
271 descriptor->stack_parameter_count_ = a0;
272 descriptor->register_param_count_ = 2;
273 descriptor->register_params_ = registers_variable_args;
276 descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
278 descriptor->deoptimization_handler_ =
285 CodeStubInterfaceDescriptor* descriptor) {
286 InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
292 CodeStubInterfaceDescriptor* descriptor) {
293 InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
299 CodeStubInterfaceDescriptor* descriptor) {
300 InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
306 CodeStubInterfaceDescriptor* descriptor) {
307 static Register registers[] = { a0 };
308 descriptor->register_param_count_ = 1;
309 descriptor->register_params_ = registers;
310 descriptor->deoptimization_handler_ =
312 descriptor->SetMissHandler(
313 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
319 CodeStubInterfaceDescriptor* descriptor) {
320 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
326 CodeStubInterfaceDescriptor* descriptor) {
327 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
333 CodeStubInterfaceDescriptor* descriptor) {
334 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
340 CodeStubInterfaceDescriptor* descriptor) {
341 static Register registers[] = { a1, a2, a0 };
342 descriptor->register_param_count_ = 3;
343 descriptor->register_params_ = registers;
344 descriptor->deoptimization_handler_ =
351 CodeStubInterfaceDescriptor* descriptor) {
352 static Register registers[] = { a0, a3, a1, a2 };
353 descriptor->register_param_count_ = 4;
354 descriptor->register_params_ = registers;
355 descriptor->deoptimization_handler_ =
362 CodeStubInterfaceDescriptor* descriptor) {
363 static Register registers[] = { a1, a0 };
364 descriptor->register_param_count_ = 2;
365 descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
367 descriptor->SetMissHandler(
368 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
372 void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
374 CodeStubInterfaceDescriptor* descriptor) {
375 static Register registers[] = { a2, a1, a0 };
376 descriptor->register_param_count_ = 3;
377 descriptor->register_params_ = registers;
378 descriptor->deoptimization_handler_ =
383 void StringAddStub::InitializeInterfaceDescriptor(
385 CodeStubInterfaceDescriptor* descriptor) {
386 static Register registers[] = { a1, a0 };
387 descriptor->register_param_count_ = 2;
388 descriptor->register_params_ = registers;
389 descriptor->deoptimization_handler_ =
396 CallInterfaceDescriptor* descriptor =
398 static Register registers[] = { a1,
403 static Representation representations[] = {
409 descriptor->register_param_count_ = 4;
410 descriptor->register_params_ = registers;
411 descriptor->param_representations_ = representations;
414 CallInterfaceDescriptor* descriptor =
    static Register registers[] = { cp,
419 static Representation representations[] = {
423 descriptor->register_param_count_ = 2;
424 descriptor->register_params_ = registers;
425 descriptor->param_representations_ = representations;
428 CallInterfaceDescriptor* descriptor =
    static Register registers[] = { cp,
433 static Representation representations[] = {
437 descriptor->register_param_count_ = 2;
438 descriptor->register_params_ = registers;
439 descriptor->param_representations_ = representations;
442 CallInterfaceDescriptor* descriptor =
    static Register registers[] = { cp,
447 static Representation representations[] = {
451 descriptor->register_param_count_ = 2;
452 descriptor->register_params_ = registers;
453 descriptor->param_representations_ = representations;
456 CallInterfaceDescriptor* descriptor =
458 static Register registers[] = { a0,
464 static Representation representations[] = {
471 descriptor->register_param_count_ = 5;
472 descriptor->register_params_ = registers;
473 descriptor->param_representations_ = representations;
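// Everything below is hand-written MIPS assembly; the __ macro expands to
// ACCESS_MASM(masm), so each line emits instructions into the current
// MacroAssembler.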
#define __ ACCESS_MASM(masm)
481 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
484 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
490 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
495 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
497 Isolate* isolate = masm->isolate();
498 isolate->counters()->code_stubs()->Increment();
501 int param_count = descriptor->register_param_count_;
505 ASSERT(descriptor->register_param_count_ == 0 ||
506 a0.is(descriptor->register_params_[param_count - 1]));
  for (int i = 0; i < param_count; ++i) {
511 __ sw(descriptor->register_params_[i],
514 ExternalReference miss = descriptor->miss_handler();
515 __ CallExternalReference(miss, descriptor->register_param_count_);
class ConvertToDoubleStub : public PlatformCodeStub {
529 ConvertToDoubleStub(Register result_reg_1,
530 Register result_reg_2,
532 Register scratch_reg)
533 : result1_(result_reg_1),
534 result2_(result_reg_2),
536 zeros_(scratch_reg) { }
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
551 return result1_.code() +
552 (result2_.code() << 4) +
553 (source_.code() << 8) +
554 (zeros_.code() << 12);
557 void Generate(MacroAssembler* masm);
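// Converts the 32-bit integer in source_ into an IEEE 754 double held in a
// register pair (exponent word and mantissa word). Negative inputs are
// negated with the sign folded into the exponent word, 0 and 1 get
// dedicated fast paths, and Clz normalises the remaining values so the
// leading-zero count can be turned directly into the exponent field.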
561 void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
562 #ifndef BIG_ENDIAN_FLOATING_POINT
563 Register exponent = result1_;
564 Register mantissa = result2_;
566 Register exponent = result2_;
567 Register mantissa = result1_;
578 __ subu(at, zero_reg, source_);
579 __ Movn(source_, at, exponent);
  __ Branch(&not_special, gt, source_, Operand(1));
587 const uint32_t exponent_word_for_1 =
590 __ Or(at, exponent, Operand(exponent_word_for_1));
591 __ Movn(exponent, at, source_);
594 __ mov(mantissa, zero_reg);
  __ bind(&not_special);
599 __ Clz(zeros_, source_);
603 __ subu(mantissa, mantissa, zeros_);
604 __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
605 __ Or(exponent, exponent, mantissa);
608 __ Addu(zeros_, zeros_, Operand(1));
610 __ sllv(source_, source_, zeros_);
617 __ or_(exponent, exponent, source_);
622 Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  int double_offset = offset();
638 __ Push(scratch, scratch2, scratch3);
    __ ldc1(double_scratch, MemOperand(input_reg, double_offset));
649 __ Trunc_w_d(double_scratch, double_scratch);
651 __ mfc1(scratch3, double_scratch);
    __ Branch(&error, ne, scratch, Operand(zero_reg));
665 __ Move(result_reg, scratch3);
671 Register input_high = scratch2;
672 Register input_low = scratch3;
677 Label normal_exponent, restore_sign;
681 HeapNumber::kExponentShift,
686 __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
697 __ mov(result_reg, zero_reg);
700 __ bind(&normal_exponent);
  Register sign = result_reg;
712 Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
714 __ mov(input_high, zero_reg);
715 __ Branch(&high_shift_done);
716 __ bind(&high_shift_needed);
725 __ sllv(input_high, input_high, scratch);
727 __ bind(&high_shift_done);
730 Label pos_shift, shift_done;
732 __ subu(scratch, at, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));
736 __ Subu(scratch, zero_reg, scratch);
737 __ sllv(input_low, input_low, scratch);
738 __ Branch(&shift_done);
741 __ srlv(input_low, input_low, scratch);
743 __ bind(&shift_done);
744 __ Or(input_high, input_high, Operand(input_low));
746 __ mov(scratch, sign);
749 __ Subu(result_reg, zero_reg, input_high);
750 __ Movz(result_reg, input_high, scratch);
754 __ Pop(scratch, scratch2, scratch3);
763 stub1.GetCode(isolate);
764 stub2.GetCode(isolate);
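// WriteInt32ToHeapNumberStub stores the_int_ into a heap number without
// calling the runtime. The value kMinInt (0x80000000) cannot be negated in
// 32 bits and is handled separately below; every other value is converted
// by building the exponent and mantissa words directly.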
769 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
770 Label max_negative_int;
775 __ And(sign_, the_int_, Operand(0x80000000u));
  __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u));
780 uint32_t non_smi_exponent =
782 __ li(scratch_, Operand(non_smi_exponent));
784 __ or_(scratch_, scratch_, sign_);
786 __ subu(at, zero_reg, the_int_);
787 __ Movn(the_int_, at, sign_);
792 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
794 __ srl(at, the_int_, shift_distance);
795 __ or_(scratch_, scratch_, at);
798 __ sll(scratch_, the_int_, 32 - shift_distance);
803 __ bind(&max_negative_int);
812 __ mov(scratch_, zero_reg);
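// Handles the case where both compare operands are the same object.
// Identical references are equal except for NaN, so heap numbers still get
// their exponent bits checked before EQUAL is returned.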
822 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
826 Label heap_number, return_equal;
827 Register exp_mask_reg = t5;
  __ Branch(&not_identical, ne, a0, Operand(a1));
838 __ GetObjectType(a0, t4, t4);
841 __ GetObjectType(a0, t4, t4);
851 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ Branch(&return_equal, ne, a0, Operand(t2));
866 __ bind(&return_equal);
874 __ mov(v0, zero_reg);
  if (cc != lt && cc != gt) {
881 __ bind(&heap_number);
890 __ And(t3, t2, Operand(exp_mask_reg));
    __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg));
898 __ Or(v0, t3, Operand(t2));
    __ Ret(eq, v0, Operand(zero_reg));
  __ bind(&not_identical);
921 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
924 Label* both_loaded_as_doubles,
927 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
928 (lhs.is(a1) && rhs.is(a0)));
931 __ JumpIfSmi(lhs, &lhs_is_smi);
934 __ GetObjectType(lhs, t4, t4);
954 __ jmp(both_loaded_as_doubles);
956 __ bind(&lhs_is_smi);
958 __ GetObjectType(rhs, t4, t4);
963 __ li(v0, Operand(1));
980 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
987 Label first_non_object;
990 __ GetObjectType(lhs, a2, a2);
994 Label return_not_equal;
995 __ bind(&return_not_equal);
997 __ li(v0, Operand(1));
999 __ bind(&first_non_object);
1003 __ GetObjectType(rhs, a3, a3);
1012 __ Or(a2, a2, Operand(a3));
  __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
1018 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
1021 Label* both_loaded_as_doubles,
1022 Label* not_heap_numbers,
1024 __ GetObjectType(lhs, a3, a2);
  __ Branch(slow, ne, a3, Operand(a2));
1035 __ jmp(both_loaded_as_doubles);
1040 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
1043 Label* possible_strings,
1044 Label* not_both_strings) {
1045 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1046 (lhs.is(a1) && rhs.is(a0)));
  __ Branch(&object_test, ne, at, Operand(zero_reg));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));
1055 __ GetObjectType(rhs, a3, a3);
  __ Branch(possible_strings, ne, at, Operand(zero_reg));
1063 __ li(v0, Operand(1));
1065 __ bind(&object_test);
1067 __ GetObjectType(rhs, a2, a3);
1076 __ and_(a0, a2, a3);
1083 static void ICCompareStub_CheckInputType(MacroAssembler* masm,
1090 __ JumpIfNotSmi(input, fail);
1092 __ JumpIfSmi(input, &ok);
1093 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
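// The generic compare IC: smis are compared directly, heap numbers via the
// FPU, flat ASCII strings through the string fast path, and anything else
// falls through to the EQUALS/COMPARE builtins.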
1105 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1111 ICCompareStub_CheckInputType(masm, lhs, a2, left_, &miss);
1112 ICCompareStub_CheckInputType(masm, rhs, a3, right_, &miss);
1115 Label not_smis, both_loaded_as_doubles;
1117 Label not_two_smis, smi_done;
  __ JumpIfNotSmi(a2, &not_two_smis);
  __ subu(v0, a1, a0);
  __ bind(&not_two_smis);
1131 EmitIdenticalObjectComparison(masm, &slow, cc);
1137 __ And(t2, lhs, Operand(rhs));
  __ JumpIfNotSmi(t2, &not_smis, t0);
1147 EmitSmiNonsmiComparison(masm, lhs, rhs,
1148 &both_loaded_as_doubles, &slow, strict());
1150 __ bind(&both_loaded_as_doubles);
1155 Isolate* isolate = masm->isolate();
  __ li(t0, Operand(LESS));
  if (cc == lt || cc == le) {
    __ li(v0, Operand(LESS));
1197 EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
1200 Label check_for_internalized_strings;
1201 Label flat_string_check;
1207 EmitCheckForTwoHeapNumbers(masm,
1210 &both_loaded_as_doubles,
1211 &check_for_internalized_strings,
1212 &flat_string_check);
1214 __ bind(&check_for_internalized_strings);
  if (cc == eq && !strict()) {
1220 EmitCheckForInternalizedStringsOrObjects(
1221 masm, lhs, rhs, &flat_string_check, &slow);
1226 __ bind(&flat_string_check);
1228 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
1230 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
1256 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  if (cc == lt || cc == le) {
1279 void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
1283 __ PushSafepointRegistersAndDoubles();
1285 __ PushSafepointRegisters();
1291 void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
1294 __ StoreToSafepointRegisterSlot(t9, t9);
1296 __ PopSafepointRegistersAndDoubles();
1298 __ PopSafepointRegisters();
1312 const int argument_count = 1;
1313 const int fp_argument_count = 0;
1314 const Register scratch = a1;
1316 AllowExternalCallThatCantCauseGC scope(masm);
1317 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
1318 __ li(a0, Operand(ExternalReference::isolate_address(masm->isolate())));
1320 ExternalReference::store_buffer_overflow_function(masm->isolate()),
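// MathPowStub: computes base ^ exponent. Tagged inputs are unpacked into
// double_base / double_exponent, exponents of +0.5 and -0.5 get sqrt-based
// fast paths, integral exponents use the square-and-multiply loop further
// down, and everything else calls the C power_double_double function.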
1332 const Register base = a1;
1333 const Register exponent = a2;
1334 const Register heapnumbermap = t1;
1335 const Register heapnumber = v0;
  const FPURegister single_scratch = f8;
1341 const Register scratch = t5;
1342 const Register scratch2 = t3;
1344 Label call_runtime, done, int_exponent;
1346 Label base_is_smi, unpack_exponent;
1353 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
1355 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
  __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
1360 __ jmp(&unpack_exponent);
1362 __ bind(&base_is_smi);
1363 __ mtc1(scratch, single_scratch);
1364 __ cvt_d_w(double_base, single_scratch);
1365 __ bind(&unpack_exponent);
1367 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
1371 __ ldc1(double_exponent,
  } else if (exponent_type_ == TAGGED) {
1375 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
1377 __ ldc1(double_exponent,
  if (exponent_type_ != INTEGER) {
1382 Label int_exponent_convert;
    __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
1398 Label not_plus_half;
1401 __ Move(double_scratch, 0.5);
1413 __ neg_d(double_result, double_scratch);
1417 __ sqrt_d(double_result, double_scratch);
    __ bind(&not_plus_half);
1421 __ Move(double_scratch, -0.5);
1437 __ Move(double_result, 1);
1438 __ sqrt_d(double_scratch, double_scratch);
1439 __ div_d(double_result, double_result, double_scratch);
1445 AllowExternalCallThatCantCauseGC scope(masm);
1446 __ PrepareCallCFunction(0, 2, scratch2);
1447 __ MovToFloatParameters(double_base, double_exponent);
1449 ExternalReference::power_double_double_function(masm->isolate()),
1453 __ MovFromFloatResult(double_result);
1456 __ bind(&int_exponent_convert);
1460 __ bind(&int_exponent);
  if (exponent_type_ == INTEGER) {
1464 __ mov(scratch, exponent);
1467 __ mov(exponent, scratch);
1470 __ mov_d(double_scratch, double_base);
1471 __ Move(double_result, 1.0);
1474 Label positive_exponent;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
1476 __ Subu(scratch, zero_reg, scratch);
1477 __ bind(&positive_exponent);
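  // Exponentiation by squaring: examine the exponent bit by bit, squaring
  // double_scratch each round and multiplying it into double_result only
  // when the current bit is set.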
1479 Label while_true, no_carry, loop_end;
1480 __ bind(&while_true);
1482 __ And(scratch2, scratch, 1);
  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
1485 __ mul_d(double_result, double_result, double_scratch);
1488 __ sra(scratch, scratch, 1);
  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
1491 __ mul_d(double_scratch, double_scratch, double_scratch);
1493 __ Branch(&while_true);
  __ Branch(&done, ge, exponent, Operand(zero_reg));
1498 __ Move(double_scratch, 1.0);
1499 __ div_d(double_result, double_scratch, double_result);
1506 __ mtc1(exponent, single_scratch);
1507 __ cvt_d_w(double_exponent, single_scratch);
1510 Counters* counters = masm->isolate()->counters();
1513 __ bind(&call_runtime);
1514 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
1519 __ AllocateHeapNumber(
1520 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
1521 __ sdc1(double_result,
1523 ASSERT(heapnumber.is(v0));
1524 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
1529 AllowExternalCallThatCantCauseGC scope(masm);
1530 __ PrepareCallCFunction(0, 2, scratch);
1531 __ MovToFloatParameters(double_base, double_exponent);
1533 ExternalReference::power_double_double_function(masm->isolate()),
1537 __ MovFromFloatResult(double_result);
1540 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
1546 bool CEntryStub::NeedsImmovableCode() {
1551 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1561 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1568 stub1.GetCode(isolate);
1571 stub2.GetCode(isolate);
1578 stub1.GetCode(isolate);
1581 stub2.GetCode(isolate);
1585 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1587 CEntryStub save_doubles(1, mode);
1588 StoreBufferOverflowStub stub(mode);
1592 Code* save_doubles_code;
1593 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
1594 save_doubles_code = *save_doubles.GetCode(isolate);
1596 Code* store_buffer_overflow_code;
1597 if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) {
1598 store_buffer_overflow_code = *stub.GetCode(isolate);
  isolate->set_fp_stubs_generated(true);
1606 stub.GetCode(isolate);
1610 void CEntryStub::GenerateCore(MacroAssembler* masm,
1611 Label* throw_normal_exception,
1612 Label* throw_termination_exception,
1614 bool always_allocate) {
1620 Isolate* isolate = masm->isolate();
1625 __ PrepareCallCFunction(2, 0, a1);
1626 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1627 __ CallCFunction(ExternalReference::perform_gc_function(isolate), 2, 0);
1630 ExternalReference scope_depth =
1631 ExternalReference::heap_always_allocate_scope_depth(isolate);
1632 if (always_allocate) {
1633 __ li(a0, Operand(scope_depth));
1635 __ Addu(a1, a1, Operand(1));
1647 __ AssertStackIsAligned();
1649 __ li(a2, Operand(ExternalReference::isolate_address(isolate)));
1661 masm->bal(&find_ra);
1663 masm->bind(&find_ra);
1668 const int kNumInstructionsToJump = 5;
  masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);
1681 masm->InstructionsGeneratedSince(&find_ra));
1684 if (always_allocate) {
1686 __ li(a2, Operand(scope_depth));
1688 __ Subu(a3, a3, Operand(1));
1693 Label failure_returned;
1695 __ addiu(a2, v0, 1);
1710 __ bind(&failure_returned);
  __ Branch(&retry, eq, t0, Operand(zero_reg));
1716 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1721 __ li(a3, Operand(isolate->factory()->the_hole_value()));
1722 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1728 __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
  __ Branch(throw_termination_exception, eq, v0, Operand(t0));
1732 __ jmp(throw_normal_exception);
1764 FrameScope scope(masm, StackFrame::MANUAL);
1765 __ EnterExitFrame(save_doubles_);
1771 Label throw_normal_exception;
1772 Label throw_termination_exception;
1776 &throw_normal_exception,
1777 &throw_termination_exception,
1783 &throw_normal_exception,
1784 &throw_termination_exception,
1790 __ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
1792 &throw_normal_exception,
1793 &throw_termination_exception,
1797 { FrameScope scope(masm, StackFrame::MANUAL);
1798 __ PrepareCallCFunction(0, v0);
1800 ExternalReference::out_of_memory_function(masm->isolate()), 0);
1803 __ bind(&throw_termination_exception);
1804 __ ThrowUncatchable(v0);
1806 __ bind(&throw_normal_exception);
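// JSEntryStub: the C++-to-JavaScript entry point. It builds an entry frame,
// installs a JS_ENTRY try handler so C++ callers observe exceptions through
// the pending-exception slot, and then calls the JS entry trampoline
// builtin (or the construct variant).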
1812 Label invoke, handler_entry, exit;
1813 Isolate* isolate = masm->isolate();
1840 __ InitializeRootRegister();
1844 __ li(t3, Operand(-1));
1845 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1848 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
1851 __ Push(t3, t2, t1, t0);
1872 Label non_outermost_js;
1873 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
1874 __ li(t1, Operand(ExternalReference(js_entry_sp)));
  __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
  __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1882 __ bind(&non_outermost_js);
  __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1890 __ bind(&handler_entry);
1891 handler_offset_ = handler_entry.pos();
1896 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1906 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
1913 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
1914 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1937 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1939 __ li(t0, Operand(construct_entry));
1941 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
1942 __ li(t0, Operand(entry));
1955 Label non_outermost_js_2;
  __ Branch(&non_outermost_js_2,
            Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1961 __ li(t1, Operand(ExternalReference(js_entry_sp)));
1963 __ bind(&non_outermost_js_2);
1967 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
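// InstanceofStub: walks the prototype chain of 'object' looking for the
// prototype of 'function'. Results are cached in the instanceof cache roots
// unless the call site inlines its own check, in which case the boolean
// answer is patched directly into the caller's code.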
1994 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
1996 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
  const Register object = a0;
  const Register function = a1;
2002 const Register prototype = t0;
2003 const Register inline_site = t5;
2004 const Register scratch = a2;
2008 Label slow, loop, is_instance, is_not_instance, not_js_object;
2010 if (!HasArgsInRegisters()) {
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2021 if (!HasCallSiteInlineCheck()) {
2023 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
    __ Branch(&miss, ne, function, Operand(at));
    __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
    __ Branch(&miss, ne, map, Operand(at));
2027 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
2028 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
2037 __ JumpIfSmi(prototype, &slow);
2038 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2042 if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2044 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
2046 ASSERT(HasArgsInRegisters());
2051 __ LoadFromSafepointRegisterSlot(scratch, t0);
2052 __ Subu(inline_site, ra, scratch);
2054 __ GetRelocatedValue(inline_site, scratch, v1);
  Register scratch2 = map;
2067 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
  __ Branch(&is_instance, eq, scratch, Operand(prototype));
  __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
2075 __ bind(&is_instance);
2077 if (!HasCallSiteInlineCheck()) {
2078 __ mov(v0, zero_reg);
2079 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
2082 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
2083 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
2085 __ PatchRelocatedValue(inline_site, scratch, v0);
2087 if (!ReturnTrueFalseObject()) {
2089 __ mov(v0, zero_reg);
2092 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
2094 __ bind(&is_not_instance);
2095 if (!HasCallSiteInlineCheck()) {
2097 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
2100 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
2101 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
2103 __ PatchRelocatedValue(inline_site, scratch, v0);
2105 if (!ReturnTrueFalseObject()) {
2110 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
2112 Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  __ JumpIfSmi(function, &slow);
  __ GetObjectType(function, scratch2, scratch);
2121 __ Branch(&object_not_null,
2124 Operand(masm->isolate()->factory()->null_value()));
2126 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
2128 __ bind(&object_not_null);
  __ JumpIfNotSmi(object, &object_not_null_or_smi);
2132 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
2134 __ bind(&object_not_null_or_smi);
  __ IsObjectJSStringType(object, scratch, &slow);
2138 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
2142 if (!ReturnTrueFalseObject()) {
2143 if (HasArgsInRegisters()) {
2154 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
    __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
2156 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
2157 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
  if (kind() == Code::KEYED_LOAD_IC) {
    __ Branch(&miss, ne, a0,
        Operand(masm->isolate()->factory()->prototype_string()));
2185 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss);
2187 StubCompiler::TailCallBuiltin(
2198 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2201 const int kDisplacement =
2206 __ JumpIfNotSmi(a1, &slow);
  __ Branch(&slow, hs, a1, Operand(a0));
2223 __ subu(a3, a0, a1);
  __ Addu(a3, fp, Operand(t3));
2237 __ subu(a3, a0, a1);
2239 __ Addu(a3, a2, Operand(t3));
2247 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2251 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
2268 __ Addu(a3, a3, Operand(t3));
2273 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
2277 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
2291 Label adaptor_frame, try_allocate;
2294 __ Branch(&adaptor_frame,
2301 __ b(&try_allocate);
2305 __ bind(&adaptor_frame);
2308 __ Addu(a3, a3, Operand(t6));
  __ Branch(&skip_min, lt, a1, Operand(a2));
2320 __ bind(&try_allocate);
2324 const int kParameterMapHeaderSize =
2327 Label param_map_size;
2330 __ mov(t5, zero_reg);
2332 __ addiu(t5, t5, kParameterMapHeaderSize);
  __ bind(&param_map_size);
2337 __ Addu(t5, t5, Operand(t6));
2349 const int kNormalOffset =
2351 const int kAliasedOffset =
2356 Label skip2_ne, skip2_eq;
  __ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
  __ Branch(&skip2_eq, eq, a1, Operand(zero_reg));
2378 const int kCalleeOffset = JSObject::kHeaderSize +
2384 const int kLengthOffset = JSObject::kHeaderSize +
2399 Label skip_parameter_map;
2409 __ LoadRoot(t2, Heap::kSloppyArgumentsElementsMapRootIndex);
2415 __ Addu(t2, t0, Operand(t6));
2416 __ Addu(t2, t2, Operand(kParameterMapHeaderSize));
2427 Label parameters_loop, parameters_test;
2431 __ Subu(t5, t5, Operand(a1));
2432 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
2434 __ Addu(a3, t0, Operand(t6));
2435 __ Addu(a3, a3, Operand(kParameterMapHeaderSize));
  __ jmp(&parameters_test);
  __ bind(&parameters_loop);
  __ Addu(t6, t0, t1);
  __ Addu(t6, a3, t1);
  __ bind(&parameters_test);
2458 __ bind(&skip_parameter_map);
2463 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
2467 Label arguments_loop, arguments_test;
2471 __ Subu(t0, t0, Operand(t6));
2472 __ jmp(&arguments_test);
2474 __ bind(&arguments_loop);
2475 __ Subu(t0, t0, Operand(kPointerSize));
2478 __ Addu(t1, a3, Operand(t6));
2482 __ bind(&arguments_test);
  __ Branch(&arguments_loop, lt, t5, Operand(a2));
2492 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
2496 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
2501 Label adaptor_frame, try_allocate, runtime;
2504 __ Branch(&adaptor_frame,
2511 __ Branch(&try_allocate);
2514 __ bind(&adaptor_frame);
2518 __ Addu(a3, a2, Operand(at));
2525 Label add_arguments_object;
2526 __ bind(&try_allocate);
  __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg));
2531 __ bind(&add_arguments_object);
2535 __ Allocate(a1, v0, a2, a3, &runtime,
  __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize);
  __ Branch(&done, eq, a1, Operand(zero_reg));
2563 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);
2576 __ Addu(a2, a2, Operand(-kPointerSize));
2580 __ Addu(t0, t0, Operand(kPointerSize));
2581 __ Subu(a1, a1, Operand(1));
  __ Branch(&loop, ne, a1, Operand(zero_reg));
2590 __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
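// RegExpExecStub: calls precompiled Irregexp code directly instead of going
// through the runtime. Only sequential and external strings are executed
// here; cons and sliced strings are unwrapped first, and anything else
// falls back to Runtime::kHiddenRegExpExec.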
2594 void RegExpExecStub::Generate(MacroAssembler* masm) {
2598 #ifdef V8_INTERPRETED_REGEXP
2599 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
2600 #else // V8_INTERPRETED_REGEXP
2613 Isolate* isolate = masm->isolate();
  Register subject = s0;
  Register regexp_data = s1;
  Register last_match_info_elements = s2;
2627 ExternalReference address_of_regexp_stack_memory_address =
2628 ExternalReference::address_of_regexp_stack_memory_address(
2630 ExternalReference address_of_regexp_stack_memory_size =
2631 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2632 __ li(a0, Operand(address_of_regexp_stack_memory_size));
  __ Branch(&runtime, eq, a0, Operand(zero_reg));
2639 __ JumpIfSmi(a0, &runtime);
2640 __ GetObjectType(a0, a1, a1);
2645 if (FLAG_debug_code) {
2646 __ SmiTst(regexp_data, t0);
2648 kUnexpectedTypeForRegExpDataFixedArrayExpected,
2651 __ GetObjectType(regexp_data, a0, a0);
2653 kUnexpectedTypeForRegExpDataFixedArrayExpected,
2677 __ mov(t0, zero_reg);
2679 __ JumpIfSmi(subject, &runtime);
2680 __ mov(a3, subject);
  Label seq_string, external_string,
      check_underlying, not_seq_nor_cons,
  __ Branch(&seq_string, eq, a1, Operand(zero_reg));
2728 __ LoadRoot(a1, Heap::kempty_stringRootIndex);
  __ Branch(&runtime, ne, a0, Operand(a1));
2733 __ bind(&check_underlying);
  __ Branch(&external_string, ne, at, Operand(zero_reg));
2744 __ bind(&seq_string);
2751 __ JumpIfNotSmi(a1, &runtime);
  __ Branch(&runtime, ls, a3, Operand(a1));
2763 __ Movz(t9, t1, a0);
2770 __ JumpIfSmi(t9, &runtime);
2778 __ IncrementCounter(isolate->counters()->regexp_entry_native(),
2782 const int kRegExpExecuteArguments = 9;
2783 const int kParameterRegisters = 4;
  __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
2802 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
2806 __ li(a0, Operand(1));
2810 __ li(a0, Operand(address_of_regexp_stack_memory_address));
2812 __ li(a2, Operand(address_of_regexp_stack_memory_size));
2814 __ addu(a0, a0, a2);
2819 __ mov(a0, zero_reg);
2824 ExternalReference::address_of_static_offsets_vector(isolate)));
2830 __ Xor(a3, a3, Operand(1));
  __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
2840 __ sllv(t1, t0, a3);
2841 __ addu(t0, t2, t1);
2842 __ sllv(t1, a1, a3);
2843 __ addu(a2, t0, t1);
2847 __ sllv(t1, t2, a3);
2848 __ addu(a3, t0, t1);
2853 __ mov(a0, subject);
2857 DirectCEntryStub stub;
2858 stub.GenerateCall(masm, t9);
  __ LeaveExitFrame(false, no_reg, true);
  __ Branch(&success, eq, v0, Operand(1));
2879 __ li(a1, Operand(isolate->factory()->the_hole_value()));
2880 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ Branch(&runtime, eq, v0, Operand(a1));
2888 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
2889 Label termination_exception;
  __ Branch(&termination_exception, eq, v0, Operand(a0));
2894 __ bind(&termination_exception);
2895 __ ThrowUncatchable(v0);
2899 __ li(v0, Operand(isolate->factory()->null_value()));
2910 __ Addu(a1, a1, Operand(2));
2913 __ JumpIfSmi(a0, &runtime);
2914 __ GetObjectType(a0, a2, a2);
2917 __ lw(last_match_info_elements,
2920 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&runtime, ne, a0, Operand(at));
  __ Branch(&runtime, gt, a2, Operand(at));
2940 __ mov(a2, subject);
2941 __ RecordWriteField(last_match_info_elements,
2947 __ mov(subject, a2);
2951 __ RecordWriteField(last_match_info_elements,
2959 ExternalReference address_of_static_offsets_vector =
2960 ExternalReference::address_of_static_offsets_vector(isolate);
2961 __ li(a2, Operand(address_of_static_offsets_vector));
2965 Label next_capture, done;
2969 last_match_info_elements,
2971 __ bind(&next_capture);
2972 __ Subu(a1, a1, Operand(1));
  __ Branch(&done, lt, a1, Operand(zero_reg));
2976 __ addiu(a2, a2, kPointerSize);
2981 __ addiu(a0, a0, kPointerSize);
2991 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
  __ bind(&not_seq_nor_cons);
3000 __ bind(&external_string);
3003 if (FLAG_debug_code) {
3008 kExternalStringExpectedButNotFound,
3019 __ jmp(&seq_string);
  __ bind(&not_long_external);
  __ Branch(&runtime, ne, at, Operand(zero_reg));
3032 __ jmp(&check_underlying);
3033 #endif // V8_INTERPRETED_REGEXP
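// GenerateRecordCallTarget updates the type feedback slot for a call site:
// uninitialized -> monomorphic (or an AllocationSite for the Array
// function) -> megamorphic, so later optimized code can inline or
// specialize the call.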
3037 static void GenerateRecordCallTarget(MacroAssembler* masm) {
3045 Label initialize, done, miss, megamorphic, not_array_function;
3048 masm->isolate()->heap()->megamorphic_symbol());
3050 masm->isolate()->heap()->uninitialized_symbol());
3054 __ Addu(t0, a2, Operand(t0));
  __ Branch(&done, eq, t0, Operand(a1));
3061 if (!FLAG_pretenuring_call_new) {
3067 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Branch(&miss, ne, t1, Operand(at));
    __ Branch(&megamorphic, ne, a1, Operand(t0));
3080 __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
  __ Branch(&initialize, eq, t0, Operand(at));
3084 __ bind(&megamorphic);
3086 __ Addu(t0, a2, Operand(t0));
3087 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
3092 __ bind(&initialize);
3093 if (!FLAG_pretenuring_call_new) {
    __ Branch(&not_array_function, ne, a1, Operand(t0));
3111 __ MultiPush(kSavedRegs);
3113 CreateAllocationSiteStub create_stub;
3114 __ CallStub(&create_stub);
3116 __ MultiPop(kSavedRegs);
    __ bind(&not_array_function);
3125 __ Addu(t0, a2, Operand(t0));
3129 __ Push(t0, a2, a1);
3143 Label slow, non_function, wrap, cont;
3145 if (NeedsChecks()) {
3148 __ JumpIfSmi(a1, &non_function);
3151 __ GetObjectType(a1, t0, t0);
3154 if (RecordCallTarget()) {
3155 GenerateRecordCallTarget(masm);
3159 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3165 ParameterCount actual(argc_);
3167 if (CallAsMethod()) {
3168 if (NeedsChecks()) {
3172 int32_t strict_mode_function_mask =
3175 __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
      __ Branch(&cont, ne, at, Operand(zero_reg));
3182 if (NeedsChecks()) {
3183 __ JumpIfSmi(a3, &wrap);
3184 __ GetObjectType(a3, t0, t0);
3194 if (NeedsChecks()) {
3197 if (RecordCallTarget()) {
3202 masm->isolate()->heap()->megamorphic_symbol());
3204 __ Addu(t1, a2, Operand(t1));
3205 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
3211 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
3212 __ li(a2, Operand(0, RelocInfo::NONE32));
3213 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
3215 Handle<Code> adaptor =
3216 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3217 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3222 __ bind(&non_function);
3224 __ li(a0, Operand(argc_));
3225 __ li(a2, Operand(0, RelocInfo::NONE32));
3226 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
3227 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3228 RelocInfo::CODE_TARGET);
3231 if (CallAsMethod()) {
3251 Label slow, non_function_call;
3254 __ JumpIfSmi(a1, &non_function_call);
3256 __ GetObjectType(a1, t0, t0);
3259 if (RecordCallTarget()) {
3260 GenerateRecordCallTarget(masm);
3263 __ Addu(t1, a2, at);
3264 if (FLAG_pretenuring_call_new) {
3270 Label feedback_register_initialized;
3274 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
      __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
3276 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3277 __ bind(&feedback_register_initialized);
3280 __ AssertUndefinedOrAllocationSite(a2, t1);
3284 Register jmp_reg = t0;
3297 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3300 __ bind(&non_function_call);
3301 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3304 __ li(a2, Operand(0, RelocInfo::NONE32));
3305 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3306 RelocInfo::CODE_TARGET);
3314 Label got_char_code;
3315 Label sliced_string;
3322 __ JumpIfSmi(object_, receiver_not_string_);
  __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg));
3332 __ JumpIfNotSmi(index_, &index_not_smi_);
3334 __ bind(&got_smi_index_);
  __ Branch(index_out_of_range_, ls, t0, Operand(index_));
3354 MacroAssembler* masm,
3355 const RuntimeCallHelper& call_helper) {
3356 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3359 __ bind(&index_not_smi_);
3363 Heap::kHeapNumberMapRootIndex,
3366 call_helper.BeforeCall(masm);
3368 __ Push(object_, index_);
3370 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3374 __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
3380 __ Move(index_, v0);
3385 call_helper.AfterCall(masm);
3387 __ JumpIfNotSmi(index_, index_out_of_range_);
3389 __ Branch(&got_smi_index_);
3394 __ bind(&call_runtime_);
3395 call_helper.BeforeCall(masm);
3397 __ Push(object_, index_);
3398 __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
3400 __ Move(result_, v0);
3402 call_helper.AfterCall(masm);
3405 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
  __ Branch(&slow_case_, ne, t0, Operand(zero_reg));
3427 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3431 __ Addu(result_, result_, t0);
3433 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
  __ Branch(&slow_case_, eq, result_, Operand(t0));
3440 MacroAssembler* masm,
3441 const RuntimeCallHelper& call_helper) {
3442 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3444 __ bind(&slow_case_);
3445 call_helper.BeforeCall(masm);
3447 __ CallRuntime(Runtime::kCharFromCode, 1);
3448 __ Move(result_, v0);
3450 call_helper.AfterCall(masm);
3453 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3457 enum CopyCharactersFlags {
3459 DEST_ALWAYS_ALIGNED = 2
3473 bool ascii = (flags & COPY_ASCII) != 0;
3474 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
3476 if (dest_always_aligned && FLAG_debug_code) {
3481 kDestinationOfCopyNotAligned,
3486 const int kReadAlignment = 4;
3487 const int kReadAlignmentMask = kReadAlignment - 1;
3497 __ addu(count, count, count);
    __ Branch(&done, eq, count, Operand(zero_reg));
3503 __ Subu(scratch1, count, Operand(8));
3504 __ Addu(count, dest, Operand(count));
3505 Register limit = count;
  __ Branch(&byte_loop, lt, scratch1, Operand(zero_reg));
3508 if (!dest_always_aligned) {
3510 __ And(scratch4, dest, Operand(kReadAlignmentMask));
    __ Branch(&dest_aligned, eq, scratch4, Operand(zero_reg));
3514 __ bind(&aligned_loop);
3516 __ addiu(src, src, 1);
3518 __ addiu(dest, dest, 1);
3519 __ addiu(scratch4, scratch4, 1);
    __ Branch(&aligned_loop, le, scratch4, Operand(kReadAlignmentMask));
3521 __ bind(&dest_aligned);
3526 __ And(scratch4, src, Operand(kReadAlignmentMask));
  __ Branch(&simple_loop, eq, scratch4, Operand(zero_reg));
3536 __ Addu(src, src, Operand(kReadAlignment));
3539 __ Addu(dest, dest, Operand(kReadAlignment));
3540 __ Subu(scratch2, limit, dest);
    __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
3544 __ Branch(&byte_loop);
3549 __ bind(&simple_loop);
3554 __ Addu(src, src, Operand(kReadAlignment));
3556 __ Addu(dest, dest, Operand(kReadAlignment));
3557 __ Subu(scratch2, limit, dest);
    __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
3562 __ bind(&byte_loop);
  __ Branch(&done, ge, dest, Operand(limit));
3566 __ addiu(src, src, 1);
3568 __ addiu(dest, dest, 1);
3569 __ Branch(&byte_loop);
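// String hash computation (one-at-a-time hash): seed with the heap's hash
// seed, then for every character do hash += c; hash += hash << 10;
// hash ^= hash >> 6, and finish with the << 3 / >> 11 / << 15 mixing steps
// before masking off the non-hash bits.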
3577 Register character) {
3579 __ LoadRoot(hash, Heap::kHashSeedRootIndex);
3582 __ addu(hash, hash, character);
3583 __ sll(at, hash, 10);
3584 __ addu(hash, hash, at);
3586 __ srl(at, hash, 6);
3587 __ xor_(hash, hash, at);
3593 Register character) {
3595 __ addu(hash, hash, character);
3597 __ sll(at, hash, 10);
3598 __ addu(hash, hash, at);
3600 __ srl(at, hash, 6);
3601 __ xor_(hash, hash, at);
3608 __ sll(at, hash, 3);
3609 __ addu(hash, hash, at);
3611 __ srl(at, hash, 11);
3612 __ xor_(hash, hash, at);
3614 __ sll(at, hash, 15);
3615 __ addu(hash, hash, at);
3618 __ and_(hash, hash, at);
3622 __ Movz(hash, at, hash);
3626 void SubStringStub::Generate(MacroAssembler* masm) {
3653 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
3654 __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
  __ Branch(&runtime, lt, a3, Operand(zero_reg));
  __ Branch(&runtime, gt, a3, Operand(a2));
3660 __ Subu(a2, a2, a3);
3664 __ JumpIfSmi(v0, &runtime);
  __ Branch(&runtime, ne, t0, Operand(zero_reg));
  __ Branch(&single_char, eq, a2, Operand(1));
  __ Branch(&return_v0, eq, a2, Operand(t0));
  __ Branch(&runtime, hi, a2, Operand(t0));
3692 Label underlying_unpacked, sliced_string, seq_or_external_string;
    __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
    __ LoadRoot(t0, Heap::kempty_stringRootIndex);
    __ Branch(&runtime, ne, t1, Operand(t0));
3709 __ jmp(&underlying_unpacked);
3711 __ bind(&sliced_string);
3716 __ Addu(a3, a3, t0);
3720 __ jmp(&underlying_unpacked);
3722 __ bind(&seq_or_external_string);
3726 __ bind(&underlying_unpacked);
3728 if (FLAG_string_slices) {
3741 Label two_byte_slice, set_slice_header;
    __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
3746 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
3747 __ jmp(&set_slice_header);
3748 __ bind(&two_byte_slice);
3749 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
3750 __ bind(&set_slice_header);
    __ bind(&copy_routine);
3763 Label two_byte_sequential, sequential_string, allocate_result;
  __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
  __ Branch(&runtime, ne, t0, Operand(zero_reg));
3776 __ jmp(&allocate_result);
3778 __ bind(&sequential_string);
3783 __ bind(&allocate_result);
  __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
3790 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
3793 __ Addu(t1, t1, a3);
3804 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
3808 __ bind(&two_byte_sequential);
3809 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
3814 __ Addu(t1, t1, t0);
3824 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
3826 __ bind(&return_v0);
3827 Counters* counters = masm->isolate()->counters();
3828 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
3833 __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
3835 __ bind(&single_char);
3841 StringCharAtGenerator generator(
3843 generator.GenerateFast(masm);
3845 generator.SkipSlow(masm, &runtime);
3854 Register scratch3) {
3855 Register length = scratch1;
3858 Label strings_not_equal, check_zero_length;
  __ Branch(&check_zero_length, eq, length, Operand(scratch2));
3862 __ bind(&strings_not_equal);
3868 Label compare_chars;
3869 __ bind(&check_zero_length);
  __ Branch(&compare_chars, ne, length, Operand(zero_reg));
3877 __ bind(&compare_chars);
3879 GenerateAsciiCharsCompareLoop(masm,
3880 left, right, length, scratch2, scratch3, v0,
3881 &strings_not_equal);
3895 Register scratch4) {
3896 Label result_not_equal, compare_lengths;
3900 __ Subu(scratch3, scratch1, Operand(scratch2));
3901 Register length_delta = scratch3;
3902 __ slt(scratch4, scratch2, scratch1);
3903 __ Movn(scratch1, scratch2, scratch4);
3904 Register min_length = scratch1;
  __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
3909 GenerateAsciiCharsCompareLoop(masm,
3910 left, right, min_length, scratch2, scratch4, v0,
3914 __ bind(&compare_lengths);
3917 __ mov(scratch2, length_delta);
3918 __ mov(scratch4, zero_reg);
3919 __ mov(v0, zero_reg);
3921 __ bind(&result_not_equal);
  __ Branch(&ret, eq, scratch2, Operand(scratch4));
  __ Branch(&ret, gt, scratch2, Operand(scratch4));
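// Compares the characters of two sequential ASCII strings by walking a
// negative index up towards zero, so the loop terminates exactly when the
// index register reaches zero.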
3934 void StringCompareStub::GenerateAsciiCharsCompareLoop(
3935 MacroAssembler* masm,
3942 Label* chars_not_equal) {
3946 __ SmiUntag(length);
3947 __ Addu(scratch1, length,
3949 __ Addu(left, left, Operand(scratch1));
3950 __ Addu(right, right, Operand(scratch1));
3951 __ Subu(length, zero_reg, length);
3952 Register index = length;
3958 __ Addu(scratch3, left, index);
3960 __ Addu(scratch3, right, index);
  __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
  __ Addu(index, index, 1);
  __ Branch(&loop, ne, index, Operand(zero_reg));
3968 void StringCompareStub::Generate(MacroAssembler* masm) {
3971 Counters* counters = masm->isolate()->counters();
  __ Branch(&not_same, ne, a0, Operand(a1));
3984 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
3990 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
3993 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
  __ Addu(sp, sp, Operand(2 * kPointerSize));
3998 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4003 Register receiver = a0;
4004 Register scratch = a1;
4006 int argc = arguments_count();
4011 __ DropAndRet(argc + 1);
4015 Isolate* isolate = masm->isolate();
4018 __ TailCallExternalReference(
4019 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4023 Label call_builtin, attempt_to_grow_elements, with_write_barrier;
4025 Register elements = t2;
4026 Register end_elements = t1;
4032 __ CheckMap(elements,
4034 Heap::kFixedArrayMapRootIndex,
4046 const int kEndElementsOffset =
  __ Branch(&attempt_to_grow_elements, gt, scratch, Operand(t0));
4055 __ JumpIfNotSmi(t0, &with_write_barrier);
4061 __ Addu(end_elements, elements, end_elements);
4062 __ Addu(end_elements, end_elements, kEndElementsOffset);
    __ Branch(&call_builtin, gt, scratch, Operand(t0));
4069 __ StoreNumberToDoubleElements(t0, scratch, elements, a3, t1, a2,
4070 &call_builtin, argc * kDoubleSize);
4075 __ mov(v0, scratch);
4076 __ DropAndRet(argc + 1);
4079 __ bind(&call_builtin);
4080 __ TailCallExternalReference(
4081 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4085 __ bind(&with_write_barrier);
  if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
4091 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&call_builtin, eq, t3, Operand(at));
  const int origin_offset = header_size + elements_kind() * kPointerSize;
  __ Branch(&call_builtin, ne, a2, Operand(at));
  const int target_offset = header_size + target_kind * kPointerSize;
4109 __ mov(a2, receiver);
4121 __ Addu(end_elements, elements, end_elements);
4122 __ Addu(end_elements, end_elements, kEndElementsOffset);
4125 __ RecordWrite(elements,
4132 __ mov(v0, scratch);
4133 __ DropAndRet(argc + 1);
4135 __ bind(&attempt_to_grow_elements);
4138 if (!FLAG_inline_new) {
4139 __ bind(&call_builtin);
4140 __ TailCallExternalReference(
4141 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4149 __ JumpIfNotSmi(a2, &call_builtin);
4155 ExternalReference new_space_allocation_top =
4156 ExternalReference::new_space_allocation_top_address(isolate);
4157 ExternalReference new_space_allocation_limit =
4158 ExternalReference::new_space_allocation_limit_address(isolate);
4160 const int kAllocationDelta = 4;
4161 ASSERT(kAllocationDelta >= argc);
4164 __ Addu(end_elements, elements, end_elements);
4165 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
4166 __ li(t0, Operand(new_space_allocation_top));
  __ Branch(&call_builtin, ne, a3, Operand(end_elements));
4170 __ li(t3, Operand(new_space_allocation_limit));
4172 __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
  __ Branch(&call_builtin, hi, a3, Operand(t3));
4181 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
  for (int i = 1; i < kAllocationDelta; i++) {
4193 __ mov(v0, scratch);
4194 __ DropAndRet(argc + 1);
4196 __ bind(&call_builtin);
4197 __ TailCallExternalReference(
4198 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4202 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4208 Isolate* isolate = masm->isolate();
  __ li(a2, handle(isolate->heap()->undefined_value()));
4216 if (FLAG_debug_code) {
    __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Assert(eq, kExpectedAllocationSite, t0, Operand(at));
4226 BinaryOpWithAllocationSiteStub stub(state_);
4227 __ TailCallStub(&stub);
4231 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4235 __ JumpIfNotSmi(a2, &miss);
  if (GetCondition() == eq) {
4240 __ Subu(v0, a0, a1);
4246 __ Subu(v0, a1, a0);
4254 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
4258 Label unordered, maybe_undefined1, maybe_undefined2;
4262 __ JumpIfNotSmi(a1, &miss);
4265 __ JumpIfNotSmi(a0, &miss);
4271 Label done, left, left_smi, right_smi;
4272 __ JumpIfSmi(a0, &right_smi);
4273 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
4278 __ bind(&right_smi);
4279 __ SmiUntag(a2, a0);
  FPURegister single_scratch = f6;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f2, single_scratch);
4285 __ JumpIfSmi(a1, &left_smi);
4286 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
4292 __ SmiUntag(a2, a1);
  single_scratch = f8;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f0, single_scratch);
4300 Label fpu_eq, fpu_lt;
  __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
  __ li(v0, Operand(LESS));
4320 __ bind(&unordered);
4321 __ bind(&generic_stub);
4324 __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4326 __ bind(&maybe_undefined1);
4328 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&miss, ne, a0, Operand(at));
4330 __ JumpIfSmi(a1, &unordered);
4331 __ GetObjectType(a1, a2, a2);
4336 __ bind(&maybe_undefined2);
4338 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&unordered, eq, a1, Operand(at));
4347 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
4353 Register right = a0;
4358 __ JumpIfEitherSmi(left, right, &miss);
4366 __ Or(tmp1, tmp1, Operand(tmp2));
  __ Branch(&miss, ne, at, Operand(zero_reg));
  __ Ret(ne, left, Operand(right));
4387 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
4394 Register right = a0;
4399 __ JumpIfEitherSmi(left, right, &miss);
4408 __ JumpIfNotUniqueName(tmp1, &miss);
4409 __ JumpIfNotUniqueName(tmp2, &miss);
  __ Branch(&done, ne, left, Operand(right));
4431 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
4439 Register right = a0;
4447 __ JumpIfEitherSmi(left, right, &miss);
4456 __ Or(tmp3, tmp1, tmp2);
  __ Branch(&miss, ne, tmp5, Operand(zero_reg));
4461 Label left_ne_right;
  __ Branch(&left_ne_right, ne, left, Operand(right));
4466 __ mov(v0, zero_reg);
4467 __ bind(&left_ne_right);
4477 __ Or(tmp3, tmp1, Operand(tmp2));
  __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
4486 __ bind(&is_symbol);
4491 __ JumpIfBothInstanceTypesAreNotSequentialAscii(
4492 tmp1, tmp2, tmp3, tmp4, &runtime);
4497 masm, left, right, tmp1, tmp2, tmp3);
4500 masm, left, right, tmp1, tmp2, tmp3, tmp4);
4505 __ Push(left, right);
4507 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4509 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4517 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4520 __ And(a2, a1, Operand(a0));
4521 __ JumpIfSmi(a2, &miss);
4523 __ GetObjectType(a0, a2, a2);
4525 __ GetObjectType(a1, a2, a2);
4530 __ subu(v0, a0, a1);
4537 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4540 __ JumpIfSmi(a2, &miss);
  __ Branch(&miss, ne, a2, Operand(known_map_));
  __ Branch(&miss, ne, a3, Operand(known_map_));
4547 __ subu(v0, a0, a1);
4554 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4557 ExternalReference miss =
4558 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
4561 __ Push(ra, a1, a0);
4563 __ addiu(sp, sp, -kPointerSize);
4593 __ Assert(ne, kReceivedInvalidReturnAddress, t0,
4594 Operand(reinterpret_cast<uint32_t>(kZapValue)));
4603 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
4604 __ Move(t9, target);
4614 Register properties,
4616 Register scratch0) {
4617 ASSERT(name->IsUniqueName());
4623 for (int i = 0; i < kInlinedProbes; i++) {
4626 Register index = scratch0;
4629 __ Subu(index, index, Operand(1));
4630 __ And(index, index, Operand(
4631 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
4635 __ sll(at, index, 1);
4636 __ Addu(index, index, at);
4638 Register entity_name = scratch0;
4641 Register tmp = properties;
4642 __ sll(scratch0, index, 1);
4643 __ Addu(tmp, properties, scratch0);
4646 ASSERT(!tmp.is(entity_name));
4647 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
4648 __ Branch(done, eq, entity_name, Operand(tmp));
4651 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
4654 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));
4657 __ Branch(&good, eq, entity_name, Operand(tmp));
4663 __ JumpIfNotUniqueName(entity_name, miss);
4671 const int spill_mask =
4672 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
4673 a2.bit() | a1.bit() | a0.bit() | v0.bit());
4675 __ MultiPush(spill_mask);
4677 __ li(a1, Operand(Handle<Name>(name)));
4681 __ MultiPop(spill_mask);
4683 __ Branch(done, eq, at, Operand(zero_reg));
4684 __ Branch(miss, ne, at, Operand(zero_reg));
4698 Register scratch2) {
4699 ASSERT(!elements.is(scratch1));
4700 ASSERT(!elements.is(scratch2));
4701 ASSERT(!name.is(scratch1));
4702 ASSERT(!name.is(scratch2));
4704 __ AssertName(name);
4709 __ Subu(scratch1, scratch1, Operand(1));
4714 for (int i = 0; i < kInlinedProbes; i++) {
4721 ASSERT(NameDictionary::GetProbeOffset(i) <
4723 __ Addu(scratch2, scratch2, Operand(
4727 __ And(scratch2, scratch1, scratch2);
4733 __ sll(at, scratch2, 1);
4734 __ Addu(scratch2, scratch2, at);
4737 __ sll(at, scratch2, 2);
4738 __ Addu(scratch2, elements, at);
4740 __ Branch(done, eq, name, Operand(at));
4743 const int spill_mask =
4744 (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
4745 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
4746 ~(scratch1.bit() | scratch2.bit());
4748 __ MultiPush(spill_mask);
4750 ASSERT(!elements.is(a1));
4752 __ Move(a0, elements);
4754 __ Move(a0, elements);
4759 __ mov(scratch2, a2);
4761 __ MultiPop(spill_mask);
4763 __ Branch(done, ne, at, Operand(zero_reg));
4764 __ Branch(miss, eq, at, Operand(zero_reg));
4780 Register result = v0;
4781 Register dictionary = a0;
4783 Register index = a2;
4786 Register undefined = t1;
4787 Register entry_key = t2;
4789 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4793 __ Subu(mask, mask, Operand(1));
4797 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
4799 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4806 ASSERT(NameDictionary::GetProbeOffset(i) <
4808 __ Addu(index, hash, Operand(
4811 __ mov(index, hash);
4814 __ And(index, mask, index);
4820 __ sll(index, index, 1);
4821 __ Addu(index, index, at);
4825 __ sll(index, index, 2);
4826 __ Addu(index, index, dictionary);
4830 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
4833 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
4840 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
4844 __ bind(&maybe_in_dictionary);
4850 __ mov(result, zero_reg);
4853 __ bind(&in_dictionary);
4857 __ bind(&not_in_dictionary);
4859 __ mov(result, zero_reg);
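Both the inlined probes and the full lookup in this stub walk the NameDictionary with the same open-addressed probing scheme: the capacity is a power of two, so probe indices are masked with capacity - 1, and each entry spans three slots, which is why the code computes index * 3 with a shift and an add. A hedged C++ sketch of the probe sequence; GetProbeOffset and kEntrySize mirror the constants referenced here, the remaining names are illustrative:

#include <cstdint>

static const int kEntrySize = 3;  // key, value, details per dictionary entry

// Triangular-number probe offsets, as commonly used by V8's hash tables.
static inline uint32_t GetProbeOffset(uint32_t probe) {
  return (probe + probe * probe) >> 1;
}

// Element index of the slot inspected on the given probe; the stub's
// sll/Addu pair computes the same index * 3 as (index << 1) + index.
uint32_t ProbeSlot(uint32_t hash, uint32_t probe, uint32_t capacity) {
  uint32_t mask = capacity - 1;  // capacity is a power of two
  uint32_t index = (hash + GetProbeOffset(probe)) & mask;
  return index * kEntrySize;
}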
4866 stub1.GetCode(isolate);
4869 stub2.GetCode(isolate);
4873 bool CodeStub::CanUseFPRegisters() {
4882 void RecordWriteStub::Generate(MacroAssembler* masm) {
4883 Label skip_to_incremental_noncompacting;
4884 Label skip_to_incremental_compacting;
4892 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
4894 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
4898 __ RememberedSetHelper(object_,
4906 __ bind(&skip_to_incremental_noncompacting);
4909 __ bind(&skip_to_incremental_compacting);
4920 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4924 Label dont_need_remembered_set;
4926 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
4927 __ JumpIfNotInNewSpace(regs_.scratch0(),
4929 &dont_need_remembered_set);
4931 __ CheckPageFlag(regs_.object(),
4935 &dont_need_remembered_set);
4939 CheckNeedsToInformIncrementalMarker(
4940 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
4941 InformIncrementalMarker(masm);
4942 regs_.Restore(masm);
4943 __ RememberedSetHelper(object_,
4949 __ bind(&dont_need_remembered_set);
4952 CheckNeedsToInformIncrementalMarker(
4953 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
4954 InformIncrementalMarker(masm);
4955 regs_.Restore(masm);
4960 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4961 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4962 int argument_count = 3;
4963 __ PrepareCallCFunction(argument_count, regs_.scratch0());
4965 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
4966 ASSERT(!address.is(regs_.object()));
4968 __ Move(address, regs_.address());
4969 __ Move(a0, regs_.object());
4970 __ Move(a1, address);
4971 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
4973 AllowExternalCallThatCantCauseGC scope(masm);
4975 ExternalReference::incremental_marking_record_write_function(
4978 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4982 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4983 MacroAssembler* masm,
4984 OnNoNeedToInformIncrementalMarker on_no_need,
4987 Label need_incremental;
4988 Label need_incremental_pop_scratch;
4991 __ lw(regs_.scratch1(),
4994 __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1));
4995 __ sw(regs_.scratch1(),
4998 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
5002 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
5004 regs_.Restore(masm);
5005 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5006 __ RememberedSetHelper(object_,
5018 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
5021 Label ensure_not_white;
5023 __ CheckPageFlag(regs_.scratch0(),
5029 __ CheckPageFlag(regs_.object(),
5035 __ bind(&ensure_not_white);
5040 __ Push(regs_.object(), regs_.address());
5041 __ EnsureNotWhite(regs_.scratch0(),
5045 &need_incremental_pop_scratch);
5046 __ Pop(regs_.object(), regs_.address());
5048 regs_.Restore(masm);
5049 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5050 __ RememberedSetHelper(object_,
5059 __ bind(&need_incremental_pop_scratch);
5060 __ Pop(regs_.object(), regs_.address());
5062 __ bind(&need_incremental);
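RecordWriteStub implements the write barrier's two jobs: remembering old-to-new pointers for the scavenger, and keeping the incremental marker's tri-color invariant when a black object is made to point at an unmarked value. A simplified model of that decision; the real stub reads page flags and mark bits, and all names below are made up for illustration:

#include <unordered_set>
#include <vector>

static std::unordered_set<const void*> new_space;      // objects in new space
static std::unordered_set<const void*> black_objects;  // fully marked objects
static std::unordered_set<void**> remembered_set;      // recorded old->new slots
static std::vector<const void*> marking_deque;         // grey worklist
static bool incremental_marking_active = false;

void RecordWrite(const void* host, void** slot, const void* value) {
  // Generational part: remember old->new pointers so the scavenger can find
  // them without scanning old space.
  if (new_space.count(value) && !new_space.count(host)) {
    remembered_set.insert(slot);
  }
  // Incremental part: a black host must not point at an unvisited (white)
  // value, so re-grey the value for the marker to process.
  if (incremental_marking_active && black_objects.count(host) &&
      !black_objects.count(value)) {
    marking_deque.push_back(value);
  }
}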
5068 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
5078 Label double_elements;
5080 Label slow_elements;
5081 Label fast_elements;
5088 __ CheckFastElements(a2, t1, &double_elements);
5090 __ JumpIfSmi(a0, &smi_element);
5091 __ CheckFastSmiElements(a2, t1, &fast_elements);
5095 __ bind(&slow_elements);
5097 __ Push(a1, a3, a0);
5101 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
5104 __ bind(&fast_elements);
5107 __ Addu(t2, t1, t2);
5118 __ bind(&smi_element);
5121 __ Addu(t2, t1, t2);
5127 __ bind(&double_elements);
5129 __ StoreNumberToDoubleElements(a0, a3, t1, t3, t5, a2, &slow_elements);
5135 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
5137 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
5138 int parameter_count_offset =
5142 __ Addu(a1, a1, Operand(1));
5144 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5152 if (masm->isolate()->function_entry_hook() != NULL) {
5161 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
5164 const int32_t kReturnAddressDistanceFromFunctionStart =
5176 __ MultiPush(kSavedRegs | ra.bit());
5179 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
5183 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
5186 int frame_alignment = masm->ActivationFrameAlignment();
5187 if (frame_alignment > kPointerSize) {
5190 __ And(sp, sp, Operand(-frame_alignment));
5193 #if defined(V8_HOST_ARCH_MIPS)
5195 reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
5196 __ li(t9, Operand(entry_hook));
5201 __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
5204 __ li(t9, Operand(ExternalReference(&dispatcher,
5205 ExternalReference::BUILTIN_CALL,
5212 if (frame_alignment > kPointerSize) {
5219 __ MultiPop(kSavedRegs | ra.bit());
5225 static void CreateArrayDispatch(MacroAssembler* masm,
5229 __ TailCallStub(&stub);
5233 for (int i = 0; i <= last_index; ++i) {
5236 __ TailCallStub(&stub, eq, a3, Operand(kind));
5240 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5247 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5254 Label normal_sequence;
5264 __ And(at, a3, Operand(1));
5265 __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
5270 __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));
5276 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
5278 __ TailCallStub(&stub_holey);
5280 __ bind(&normal_sequence);
5281 ArraySingleArgumentConstructorStub stub(initial,
5283 __ TailCallStub(&stub);
5287 __ Addu(a3, a3, Operand(1));
5289 if (FLAG_debug_code) {
5291 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
5292 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at));
5304 __ bind(&normal_sequence);
5307 for (int i = 0; i <= last_index; ++i) {
5309 ArraySingleArgumentConstructorStub stub(kind);
5310 __ TailCallStub(&stub, eq, a3, Operand(kind));
5314 __ Abort(kUnexpectedElementsKindInArrayConstructor);
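CreateArrayDispatchOneArgument first tests the low bit of the ElementsKind (holey kinds are the odd values) and, when the single argument is non-zero on a packed site, transitions the AllocationSite to the corresponding holey kind before tail-calling the holey stub. A small sketch of that kind arithmetic, assuming the usual even/odd packed/holey encoding (i.e. kFastElementsKindPackedToHoley == 1):

// Assumed encoding: packed fast kinds are even, holey variants are kind | 1.
enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3
};

inline bool IsHoleyKind(ElementsKind kind) { return (kind & 1) != 0; }

inline ElementsKind GetHoleyKind(ElementsKind kind) {
  return static_cast<ElementsKind>(kind | 1);
}

// Mirrors the dispatch above for "new Array(n)": a non-zero argument on a
// packed site forces the holey variant before the stub is tail-called.
ElementsKind KindForSingleArgument(ElementsKind site_kind, bool arg_is_zero) {
  if (!IsHoleyKind(site_kind) && !arg_is_zero) return GetHoleyKind(site_kind);
  return site_kind;
}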
5322 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
5325 for (int i = 0; i <= to_index; ++i) {
5328 stub.GetCode(isolate);
5331 stub1.GetCode(isolate);
5338 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
5340 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
5342 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
5350 for (int i = 0; i < 2; i++) {
5352 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
5353 stubh1.GetCode(isolate);
5354 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
5355 stubh2.GetCode(isolate);
5356 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
5357 stubh3.GetCode(isolate);
5362 void ArrayConstructorStub::GenerateDispatchToArrayStub(
5363 MacroAssembler* masm,
5365 if (argument_count_ == ANY) {
5366 Label not_zero_case, not_one_case;
5368 __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
5369 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5371 __ bind(&not_zero_case);
5372 __ Branch(&not_one_case, gt, a0, Operand(1));
5373 CreateArrayDispatchOneArgument(masm, mode);
5375 __ bind(&not_one_case);
5376 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5377 } else if (argument_count_ == NONE) {
5378 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5379 } else if (argument_count_ == ONE) {
5380 CreateArrayDispatchOneArgument(masm, mode);
5382 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5398 if (FLAG_debug_code) {
5406 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
5407 at, Operand(zero_reg));
5408 __ GetObjectType(t0, t0, t1);
5409 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
5413 __ AssertUndefinedOrAllocationSite(a2, t0);
5418 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5419 __ Branch(&no_info, eq, a2, Operand(at));
5432 void InternalArrayConstructorStub::GenerateCase(
5435 InternalArrayNoArgumentConstructorStub stub0(kind);
5436 __ TailCallStub(&stub0, lo, a0, Operand(1));
5438 InternalArrayNArgumentsConstructorStub stubN(kind);
5439 __ TailCallStub(&stubN, hi, a0, Operand(1));
5446 InternalArraySingleArgumentConstructorStub
5448 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
5451 InternalArraySingleArgumentConstructorStub stub1(kind);
5452 __ TailCallStub(&stub1);
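InternalArrayConstructorStub::GenerateCase dispatches purely on the argument count in a0: zero arguments take the no-argument stub, more than one the N-arguments stub, and exactly one argument picks the single-argument stub, with the holey variant when a packed kind receives a non-zero length. The control flow reduces to roughly the following sketch, where the TailCall* helpers are illustrative stand-ins for __ TailCallStub(...):

#include <cstdio>

static void TailCallNoArgumentStub()          { std::puts("no-argument stub"); }
static void TailCallNArgumentsStub()          { std::puts("N-arguments stub"); }
static void TailCallSingleArgumentStub()      { std::puts("single-argument stub"); }
static void TailCallSingleArgumentHoleyStub() { std::puts("single-argument holey stub"); }

void DispatchInternalArrayConstructor(unsigned argc, bool kind_is_packed,
                                      bool first_arg_is_nonzero) {
  if (argc < 1) { TailCallNoArgumentStub(); return; }   // lo, a0, Operand(1)
  if (argc > 1) { TailCallNArgumentsStub(); return; }   // hi, a0, Operand(1)
  if (kind_is_packed && first_arg_is_nonzero) {
    TailCallSingleArgumentHoleyStub();                  // new Array(n), n != 0
    return;
  }
  TailCallSingleArgumentStub();
}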
5464 if (FLAG_debug_code) {
5472 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
5473 at, Operand(zero_reg));
5474 __ GetObjectType(a3, a3, t0);
5475 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
5488 if (FLAG_debug_code) {
5492 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
5497 Label fast_elements_case;
5501 __ bind(&fast_elements_case);
5506 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5520 Register callee = a0;
5521 Register call_data = t0;
5522 Register holder = a2;
5523 Register api_function_address = a1;
5524 Register context = cp;
5526 int argc = ArgumentBits::decode(bit_field_);
5527 bool is_store = IsStoreBits::decode(bit_field_);
5528 bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5530 typedef FunctionCallbackArguments FCA;
5541 Isolate* isolate = masm->isolate();
5544 __ Push(context, callee, call_data);
5548 Register scratch = call_data;
5549 if (!call_data_undefined) {
5550 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5553 __ Push(scratch, scratch);
5555 Operand(ExternalReference::isolate_address(isolate)));
5557 __ Push(scratch, holder);
5560 __ mov(scratch, sp);
5564 const int kApiStackSpace = 4;
5566 FrameScope frame_scope(masm, StackFrame::MANUAL);
5567 __ EnterExitFrame(false, kApiStackSpace);
5569 ASSERT(!api_function_address.is(a0) && !scratch.is(a0));
5572 __ Addu(a0, sp, Operand(1 * kPointerSize));
5576 __ Addu(at, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
5579 __ li(at, Operand(argc));
5584 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
5587 ApiFunction thunk_fun(thunk_address);
5588 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5591 AllowExternalCallThatCantCauseGC scope(masm);
5593 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5595 int return_value_offset = 0;
5597 return_value_offset = 2 + FCA::kArgsLength;
5599 return_value_offset = 2 + FCA::kReturnValueOffset;
5601 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5603 __ CallApiFunctionAndReturn(api_function_address,
5606 return_value_operand,
5607 &context_restore_operand);
5611 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5619 Register api_function_address = a2;
5622 __ Addu(a1, a0, Operand(1 * kPointerSize));
5624 const int kApiStackSpace = 1;
5625 FrameScope frame_scope(masm, StackFrame::MANUAL);
5626 __ EnterExitFrame(false, kApiStackSpace);
5631 __ Addu(a1, sp, Operand(1 * kPointerSize));
5637 ExternalReference::PROFILING_GETTER_CALL;
5638 ApiFunction thunk_fun(thunk_address);
5639 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5641 __ CallApiFunctionAndReturn(api_function_address,
5653 #endif // V8_TARGET_ARCH_MIPS