30 #if defined(V8_TARGET_ARCH_MIPS)
41 #define __ ACCESS_MASM(masm)
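// ACCESS_MASM makes "__ Op(...)" expand to masm->Op(...), so every "__" line
// below emits one MIPS instruction (or pseudo-instruction) into the stub.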
43 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
47 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
53 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
54 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
60 static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
61 Register scratch1, Register scratch2,
62 Label* not_a_heap_number) {
64 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
65 __ Branch(not_a_heap_number, ne, scratch1, Operand(scratch2));
71 Label check_heap_number, call_builtin;
72 __ JumpIfNotSmi(a0, &check_heap_number);
76 __ bind(&check_heap_number);
77 EmitCheckForHeapNumber(masm, a0, a1, t0, &call_builtin);
81 __ bind(&call_builtin);
116 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
117 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
118 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
138 __ LoadRoot(t0, Heap::kFalseValueRootIndex);
140 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
161 __ LoadRoot(a1, Heap::kFunctionContextMapRootIndex);
175 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
176 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
186 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
209 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
218 Label after_sentinel;
219 __ JumpIfNotSmi(a3, &after_sentinel);
220 if (FLAG_debug_code) {
221 const char* message = "Expected 0 as a Smi sentinel";
222 __ Assert(eq, message, a3, Operand(zero_reg));
227 __ bind(&after_sentinel);
237 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
238 for (int i = 0; i < slots_; i++) {
248 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
252 static void GenerateFastCloneShallowArrayCommon(
253 MacroAssembler* masm,
262 int elements_size = 0;
266 : FixedArray::SizeFor(length);
272 __ AllocateInNewSpace(size,
291 __ Addu(a2, v0, Operand(JSArray::kSize));
316 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
317 __ Branch(&slow_case, eq, a3, Operand(t1));
321 Label double_elements, check_fast_elements;
324 __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
325 __ Branch(&check_fast_elements, ne, v0, Operand(t1));
326 GenerateFastCloneShallowArrayCommon(masm, 0,
331 __ bind(&check_fast_elements);
332 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
333 __ Branch(&double_elements, ne, v0, Operand(t1));
334 GenerateFastCloneShallowArrayCommon(masm, length_,
339 __ bind(&double_elements);
344 if (FLAG_debug_code) {
348 message = "Expected (writable) fixed array";
349 expected_map_index = Heap::kFixedArrayMapRootIndex;
351 message = "Expected (writable) fixed double array";
352 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
355 message = "Expected copy-on-write fixed array";
356 expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
361 __ LoadRoot(at, expected_map_index);
362 __ Assert(eq, message, a3, Operand(at));
366 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
372 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
393 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
394 __ Branch(&slow_case, eq, a3, Operand(t0));
405 __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
415 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
424 class ConvertToDoubleStub : public CodeStub {
426 ConvertToDoubleStub(Register result_reg_1,
427 Register result_reg_2,
429 Register scratch_reg)
430 : result1_(result_reg_1),
431 result2_(result_reg_2),
433 zeros_(scratch_reg) { }
442 class ModeBits: public BitField<OverwriteMode, 0, 2> {};
443 class OpBits: public BitField<Token::Value, 2, 14> {};
445 Major MajorKey() { return ConvertToDouble; }
448 return result1_.code() +
449 (result2_.code() << 4) +
450 (source_.code() << 8) +
451 (zeros_.code() << 12);
454 void Generate(MacroAssembler* masm);
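// The generator below builds an IEEE double from the untagged integer in
// source_ using only integer instructions: it handles the sign, the 0 and 1
// special cases, then normalizes with Clz and assembles the exponent and
// mantissa words (summary of the code that follows).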
458 void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
459 #ifndef BIG_ENDIAN_FLOATING_POINT
460 Register exponent = result1_;
461 Register mantissa = result2_;
463 Register exponent = result2_;
464 Register mantissa = result1_;
475 __ subu(at, zero_reg, source_);
476 __ Movn(source_, at, exponent);
481 __ Branch(&not_special, gt, source_, Operand(1));
484 const uint32_t exponent_word_for_1 =
487 __ Or(at, exponent, Operand(exponent_word_for_1));
488 __ Movn(exponent, at, source_);
491 __ mov(mantissa, zero_reg);
493 __ bind(&not_special);
496 __ Clz(zeros_, source_);
500 __ subu(mantissa, mantissa, zeros_);
501 __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
502 __ Or(exponent, exponent, mantissa);
505 __ Addu(zeros_, zeros_, Operand(1));
507 __ sllv(source_, source_, zeros_);
514 __ or_(exponent, exponent, source_);
523 CpuFeatures::Scope scope(FPU);
525 __ mtc1(scratch1, f14);
528 __ mtc1(scratch1, f12);
531 __ Move(a2, a3, f14);
532 __ Move(a0, a1, f12);
537 __ mov(scratch1, a0);
538 ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2);
540 __ Call(stub1.GetCode());
542 __ mov(scratch1, a1);
543 ConvertToDoubleStub stub2(a1, a0, scratch1, scratch2);
544 __ Call(stub2.GetCode());
551 MacroAssembler* masm,
553 Register heap_number_map,
559 LoadNumber(masm, destination,
560 a0, f14, a2, a3, heap_number_map, scratch1, scratch2, slow);
563 LoadNumber(masm, destination,
564 a1, f12, a0, a1, heap_number_map, scratch1, scratch2, slow);
568 void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
569 Destination destination,
574 Register heap_number_map,
578 if (FLAG_debug_code) {
579 __ AbortIfNotRootValue(heap_number_map,
580 Heap::kHeapNumberMapRootIndex,
581 "HeapNumberMap register clobbered.");
587 __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
589 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
594 CpuFeatures::Scope scope(FPU);
613 CpuFeatures::Scope scope(FPU);
615 __ mtc1(scratch1, dst);
616 __ cvt_d_w(dst, dst);
619 __ Move(dst1, dst2, dst);
624 __ mov(scratch1, object);
625 ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
627 __ Call(stub.GetCode());
638 Register heap_number_map,
642 FPURegister double_scratch,
644 if (FLAG_debug_code) {
645 __ AbortIfNotRootValue(heap_number_map,
646 Heap::kHeapNumberMapRootIndex,
647 "HeapNumberMap register clobbered.");
650 Label not_in_int32_range;
652 __ UntagAndJumpIfSmi(dst, object, &done);
654 __ Branch(not_number, ne, scratch1, Operand(heap_number_map));
655 __ ConvertToInt32(object,
660 &not_in_int32_range);
663 __ bind(&not_in_int32_range);
667 __ EmitOutOfInt32RangeTruncate(dst,
677 Register int_scratch,
678 Destination destination,
679 FPURegister double_dst,
683 FPURegister single_scratch) {
684 ASSERT(!int_scratch.is(scratch2));
685 ASSERT(!int_scratch.is(dst1));
686 ASSERT(!int_scratch.is(dst2));
691 CpuFeatures::Scope scope(FPU);
692 __ mtc1(int_scratch, single_scratch);
693 __ cvt_d_w(double_dst, single_scratch);
695 __ Move(dst1, dst2, double_dst);
698 Label fewer_than_20_useful_bits;
704 __ mov(dst2, int_scratch);
705 __ mov(dst1, int_scratch);
706 __ Branch(&done, eq, int_scratch, Operand(zero_reg));
712 __ Branch(&skip_sub, ge, dst2, Operand(zero_reg));
713 __ Subu(int_scratch, zero_reg, int_scratch);
719 __ Clz(dst1, int_scratch);
721 __ Subu(dst1, scratch2, dst1);
725 __ Ins(dst2, scratch2,
729 __ li(scratch2, Operand(1));
730 __ sllv(scratch2, scratch2, dst1);
732 __ Xor(scratch2, scratch2, at);
733 __ And(int_scratch, int_scratch, scratch2);
737 __ Branch(&fewer_than_20_useful_bits, lt, scratch2, Operand(zero_reg));
739 __ srlv(at, int_scratch, scratch2);
740 __ or_(dst2, dst2, at);
742 __ subu(scratch2, at, scratch2);
743 __ sllv(dst1, int_scratch, scratch2);
746 __ bind(&fewer_than_20_useful_bits);
748 __ subu(scratch2, at, dst1);
749 __ sllv(scratch2, int_scratch, scratch2);
750 __ Or(dst2, dst2, scratch2);
752 __ mov(dst1, zero_reg);
760 Destination destination,
764 Register heap_number_map,
767 FPURegister single_scratch,
769 ASSERT(!scratch1.is(object) && !scratch2.is(object));
770 ASSERT(!scratch1.is(scratch2));
771 ASSERT(!heap_number_map.is(object) &&
772 !heap_number_map.is(scratch1) &&
773 !heap_number_map.is(scratch2));
775 Label done, obj_is_not_smi;
777 __ JumpIfNotSmi(object, &obj_is_not_smi);
778 __ SmiUntag(scratch1, object);
780 scratch2, single_scratch);
783 __ bind(&obj_is_not_smi);
784 if (FLAG_debug_code) {
785 __ AbortIfNotRootValue(heap_number_map,
786 Heap::kHeapNumberMapRootIndex,
787 "HeapNumberMap register clobbered.");
789 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
793 CpuFeatures::Scope scope(FPU);
797 Register except_flag = scratch2;
806 __ Branch(not_int32, ne, except_flag, Operand(zero_reg));
809 __ Move(dst1, dst2, double_dst);
813 ASSERT(!scratch1.is(object) && !scratch2.is(object));
820 __ Or(scratch1, scratch1, Operand(dst2));
821 __ Branch(&done, eq, scratch1, Operand(zero_reg));
839 Register heap_number_map,
846 ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
847 ASSERT(!scratch1.is(scratch2) &&
848 !scratch1.is(scratch3) &&
849 !scratch2.is(scratch3));
853 __ UntagAndJumpIfSmi(dst, object, &done);
855 if (FLAG_debug_code) {
856 __ AbortIfNotRootValue(heap_number_map,
857 Heap::kHeapNumberMapRootIndex,
858 "HeapNumberMap register clobbered.");
860 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
865 CpuFeatures::Scope scope(FPU);
869 FPURegister single_scratch = double_scratch.low();
870 Register except_flag = scratch2;
879 __ Branch(not_int32, ne, except_flag, Operand(zero_reg));
881 __ mfc1(dst, single_scratch);
890 __ Or(dst, scratch2, Operand(dst));
891 __ Branch(&done, eq, dst, Operand(zero_reg));
900 __ srlv(dst, dst, scratch3);
903 __ subu(scratch3, at, scratch3);
904 __ sllv(scratch2, scratch2, scratch3);
905 __ Or(dst, dst, scratch2);
910 __ Branch(&skip_sub, ge, scratch1, Operand(zero_reg));
911 __ Subu(dst, zero_reg, dst);
928 HeapNumber::kExponentShift,
940 __ Branch(not_int32, lt, scratch, Operand(zero_reg));
947 __ srl(at, src1, 31);
948 __ subu(tmp, scratch, at);
949 __ Branch(not_int32, gt, tmp, Operand(30));
951 __ And(tmp, src2, 0x3fffff);
952 __ Branch(not_int32, ne, tmp, Operand(zero_reg));
966 __ or_(dst, dst, at);
970 __ subu(scratch, at, scratch);
972 __ sllv(src1, src2, scratch);
973 __ Subu(src1, src1, Operand(1));
974 __ And(src1, dst, src1);
975 __ Branch(not_int32, ne, src1, Operand(zero_reg));
980 MacroAssembler* masm,
982 Register heap_number_result,
996 __ PrepareCallCFunction(4, scratch);
998 CpuFeatures::Scope scope(FPU);
1004 __ Move(f12, a0, a1);
1005 __ Move(f14, a2, a3);
1008 AllowExternalCallThatCantCauseGC scope(masm);
1010 ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
1014 CpuFeatures::Scope scope(FPU);
1025 __ mov(v0, heap_number_result);
1031 if (the_int_.is(a1) &&
1032 the_heap_number_.is(v0) &&
1037 if (the_int_.is(a2) &&
1038 the_heap_number_.is(v0) &&
1053 stub1.GetCode()->set_is_pregenerated(true);
1054 stub2.GetCode()->set_is_pregenerated(true);
1059 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
1060 Label max_negative_int;
1065 __ And(sign_, the_int_, Operand(0x80000000u));
1066 __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u));
1070 uint32_t non_smi_exponent =
1072 __ li(scratch_, Operand(non_smi_exponent));
1074 __ or_(scratch_, scratch_, sign_);
1076 __ subu(at, zero_reg, the_int_);
1077 __ Movn(the_int_, at, sign_);
1082 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
1084 __ srl(at, the_int_, shift_distance);
1085 __ or_(scratch_, scratch_, at);
1088 __ sll(scratch_, the_int_, 32 - shift_distance);
1093 __ bind(&max_negative_int);
1102 __ mov(scratch_, zero_reg);
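// Identical-operand comparison: when a0 and a1 are the same object the result
// is "equal" for everything except NaN, so heap numbers fall through to the
// exponent/mantissa test at the heap_number label below.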
1112 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
1115 bool never_nan_nan) {
1116 Label not_identical;
1117 Label heap_number, return_equal;
1118 Register exp_mask_reg = t5;
1120 __ Branch(&not_identical, ne, a0, Operand(a1));
1124 if (cc != eq || !never_nan_nan) {
1132 __ GetObjectType(a0, t4, t4);
1135 __ GetObjectType(a0, t4, t4);
1145 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1146 __ Branch(&return_equal, ne, a0, Operand(t2));
1152 __ li(v0, Operand(LESS));
1160 __ bind(&return_equal);
1165 __ li(v0, Operand(LESS));
1167 __ mov(v0, zero_reg);
1171 if (cc != eq || !never_nan_nan) {
1175 if (cc != lt && cc != gt) {
1176 __ bind(&heap_number);
1185 __ And(t3, t2, Operand(exp_mask_reg));
1187 __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg));
1193 __ Or(v0, t3, Operand(t2));
1200 __ Ret(eq, v0, Operand(zero_reg));
1204 __ li(v0, Operand(LESS));
1212 __ bind(&not_identical);
1216 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
1219 Label* both_loaded_as_doubles,
1222 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1223 (lhs.is(a1) && rhs.is(a0)));
1226 __ JumpIfSmi(lhs, &lhs_is_smi);
1229 __ GetObjectType(lhs, t4, t4);
1244 CpuFeatures::Scope scope(FPU);
1256 ConvertToDoubleStub stub1(a1, a0, t6, t5);
1258 __ Call(stub1.GetCode());
1264 __ jmp(both_loaded_as_doubles);
1266 __ bind(&lhs_is_smi);
1268 __ GetObjectType(rhs, t4, t4);
1273 __ li(v0, Operand(1));
1283 CpuFeatures::Scope scope(FPU);
1291 ConvertToDoubleStub stub2(a3, a2, t6, t5);
1293 __ Call(stub2.GetCode());
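// EmitNanCheck (below) moves both doubles (f12/f14) into GPR pairs and treats
// a value as NaN when its exponent bits are all ones and its mantissa is
// non-zero; for an unordered result it loads the canonical answer (LESS when
// cc is lt or le).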
1308 void EmitNanCheck(MacroAssembler* masm, Condition cc) {
1311 CpuFeatures::Scope scope(FPU);
1313 __ Move(t0, t1, f14);
1314 __ Move(t2, t3, f12);
1322 Register rhs_exponent = exp_first ? t0 : t1;
1323 Register lhs_exponent = exp_first ? t2 : t3;
1324 Register rhs_mantissa = exp_first ? t1 : t0;
1325 Register lhs_mantissa = exp_first ? t3 : t2;
1326 Label one_is_nan, neither_is_nan;
1327 Label lhs_not_nan_exp_mask_is_loaded;
1329 Register exp_mask_reg = t4;
1331 __ and_(t5, lhs_exponent, exp_mask_reg);
1332 __ Branch(&lhs_not_nan_exp_mask_is_loaded, ne, t5, Operand(exp_mask_reg));
1335 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg));
1337 __ Branch(&one_is_nan, ne, lhs_mantissa, Operand(zero_reg));
1340 __ bind(&lhs_not_nan_exp_mask_is_loaded);
1341 __ and_(t5, rhs_exponent, exp_mask_reg);
1343 __ Branch(&neither_is_nan, ne, t5, Operand(exp_mask_reg));
1346 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg));
1348 __ Branch(&neither_is_nan, eq, rhs_mantissa, Operand(zero_reg));
1350 __ bind(&one_is_nan);
1354 if (cc == lt || cc == le) {
1357 __ li(v0, Operand(LESS));
1361 __ bind(&neither_is_nan);
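// EmitTwoNonNanDoubleComparison compares two doubles already known not to be
// NaN without FPU compares: equal mantissa and exponent words mean EQUAL, the
// Or of all four words catches +0 vs -0, and everything else is decided by
// the compare_doubles C helper.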
1365 static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
1372 Label return_result_not_equal, return_result_equal;
1378 CpuFeatures::Scope scope(FPU);
1380 __ Move(t0, t1, f14);
1381 __ Move(t2, t3, f12);
1389 Register rhs_exponent = exp_first ? t0 : t1;
1390 Register lhs_exponent = exp_first ? t2 : t3;
1391 Register rhs_mantissa = exp_first ? t1 : t0;
1392 Register lhs_mantissa = exp_first ? t3 : t2;
1394 __ xor_(v0, rhs_mantissa, lhs_mantissa);
1395 __ Branch(&return_result_not_equal, ne, v0, Operand(zero_reg));
1397 __ subu(v0, rhs_exponent, lhs_exponent);
1398 __ Branch(&return_result_equal, eq, v0, Operand(zero_reg));
1402 __ or_(t4, rhs_exponent, lhs_exponent);
1403 __ or_(t4, t4, rhs_mantissa);
1405 __ Branch(&return_result_not_equal, ne, t4, Operand(zero_reg));
1407 __ bind(&return_result_equal);
1413 __ bind(&return_result_not_equal);
1417 __ PrepareCallCFunction(0, 2, t4);
1424 __ Move(f12, a0, a1);
1425 __ Move(f14, a2, a3);
1428 AllowExternalCallThatCantCauseGC scope(masm);
1429 __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
1434 CpuFeatures::Scope scope(FPU);
1435 Label equal, less_than;
1448 __ bind(&less_than);
1449 __ li(v0, Operand(LESS));
1455 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
1462 Label first_non_object;
1465 __ GetObjectType(lhs, a2, a2);
1469 Label return_not_equal;
1470 __ bind(&return_not_equal);
1472 __ li(v0, Operand(1));
1474 __ bind(&first_non_object);
1478 __ GetObjectType(rhs, a3, a3);
1488 __ And(t2, a2, Operand(a3));
1490 __ Branch(&return_not_equal, ne, t0, Operand(zero_reg));
1494 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
1497 Label* both_loaded_as_doubles,
1498 Label* not_heap_numbers,
1500 __ GetObjectType(lhs, a3, a2);
1504 __ Branch(slow, ne, a3, Operand(a2));
1509 CpuFeatures::Scope scope(FPU);
1523 __ jmp(both_loaded_as_doubles);
1528 static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
1531 Label* possible_strings,
1532 Label* not_both_strings) {
1533 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1534 (lhs.is(a1) && rhs.is(a0)));
1541 __ Branch(&object_test, ne, at, Operand(zero_reg));
1543 __ Branch(possible_strings, eq, at, Operand(zero_reg));
1544 __ GetObjectType(rhs, a3, a3);
1547 __ Branch(possible_strings, eq, at, Operand(zero_reg));
1552 __ li(v0, Operand(1));
1554 __ bind(&object_test);
1556 __ GetObjectType(rhs, a2, a3);
1565 __ and_(a0, a2, a3);
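// Number-to-string cache lookup: a smi index (or the xor of a heap number's
// two words) is masked with the cache size to pick a probe slot; a matching
// key loads the cached string, otherwise control jumps to not_found.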
1581 Register number_string_cache = result;
1582 Register mask = scratch3;
1585 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
1592 __ Addu(mask, mask, -1);
1598 Isolate* isolate = masm->isolate();
1600 Label load_result_from_cache;
1601 if (!object_is_smi) {
1602 __ JumpIfSmi(object, &is_smi);
1604 CpuFeatures::Scope scope(FPU);
1607 Heap::kHeapNumberMapRootIndex,
1617 __ Xor(scratch1, scratch1, Operand(scratch2));
1618 __ And(scratch1, scratch1, Operand(mask));
1623 __ Addu(scratch1, number_string_cache, scratch1);
1625 Register probe = mask;
1628 __ JumpIfSmi(probe, not_found);
1632 __ Branch(not_found);
1637 __ Branch(not_found);
1642 Register scratch = scratch1;
1643 __ sra(scratch, object, 1);
1644 __ And(scratch, mask, Operand(scratch));
1649 __ Addu(scratch, number_string_cache, scratch);
1652 Register probe = mask;
1654 __ Branch(not_found, ne, object, Operand(probe));
1657 __ bind(&load_result_from_cache);
1661 __ IncrementCounter(isolate->counters()->number_to_string_native(),
1668 void NumberToStringStub::Generate(MacroAssembler* masm) {
1679 __ TailCallRuntime(Runtime::kNumberToString, 1, 1);
1688 Label not_smis, both_loaded_as_doubles;
1691 if (include_smi_compare_) {
1692 Label not_two_smis, smi_done;
1694 __ JumpIfNotSmi(a2, &not_two_smis);
1698 __ subu(v0, a1, a0);
1699 __ bind(&not_two_smis);
1700 } else if (FLAG_debug_code) {
1703 __ Assert(ne, "CompareStub: unexpected smi operands.",
1704 a2, Operand(zero_reg));
1713 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
1719 __ And(t2, lhs_, Operand(rhs_));
1720 __ JumpIfNotSmi(t2, &not_smis, t0);
1729 EmitSmiNonsmiComparison(masm, lhs_, rhs_,
1730 &both_loaded_as_doubles, &slow, strict_);
1732 __ bind(&both_loaded_as_doubles);
1737 Isolate* isolate = masm->isolate();
1739 CpuFeatures::Scope scope(FPU);
1741 __ li(t0, Operand(LESS));
1766 if (cc_ == lt || cc_ == le) {
1769 __ li(v0, Operand(LESS));
1775 EmitNanCheck(masm, cc_);
1779 EmitTwoNonNanDoubleComparison(masm, cc_);
1788 EmitStrictTwoHeapObjectCompare(masm, lhs_, rhs_);
1791 Label check_for_symbols;
1792 Label flat_string_check;
1797 EmitCheckForTwoHeapNumbers(masm,
1800 &both_loaded_as_doubles,
1802 &flat_string_check);
1804 __ bind(&check_for_symbols);
1805 if (cc_ == eq && !strict_) {
1809 EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow);
1814 __ bind(&flat_string_check);
1816 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, a2, a3, &slow);
1818 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
1840 __ Push(lhs_, rhs_);
1844 native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1848 if (cc_ == lt || cc_ == le) {
1868 CpuFeatures::Scope scope(FPU);
1871 const Register map = t5.is(tos_) ? t3 : t5;
1874 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
1877 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
1878 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
1881 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
1887 __ Ret(eq, at, Operand(zero_reg));
1890 __ JumpIfSmi(tos_, &patch);
1900 __ Movn(tos_, zero_reg, at);
1901 __ Ret(ne, at, Operand(zero_reg));
1924 Label not_heap_number;
1925 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1926 __ Branch(&not_heap_number, ne, map, Operand(at));
1927 Label zero_or_nan, number;
1929 __ BranchF(&number, &zero_or_nan, ne, f2, kDoubleRegZero);
1933 __ bind(&zero_or_nan);
1934 __ mov(tos_, zero_reg);
1937 __ bind(&not_heap_number);
1941 GenerateTypeTransition(masm);
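// CheckOddball compares tos_ against one oddball root value and, on a match,
// returns with tos_ holding the requested truth value (Movz zeroes tos_ when
// the expected result is false).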
1945 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
1951 __ LoadRoot(at, value);
1952 __ Subu(at, at, tos_);
1956 __ Movz(tos_, zero_reg, at);
1958 __ Ret(eq, at, Operand(zero_reg));
1963 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
1967 __ Push(a3, a2, a1);
1970 __ TailCallExternalReference(
1971 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
1983 CpuFeatures::Scope scope(FPU);
1986 const int argument_count = 1;
1987 const int fp_argument_count = 0;
1988 const Register scratch = a1;
1990 AllowExternalCallThatCantCauseGC scope(masm);
1991 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
1992 __ li(a0, Operand(ExternalReference::isolate_address()));
1994 ExternalReference::store_buffer_overflow_function(masm->isolate()),
1997 CpuFeatures::Scope scope(FPU);
2006 void UnaryOpStub::PrintName(StringStream* stream) {
2008 const char* overwrite_name = NULL;
2013 stream->Add("UnaryOpStub_%s_%s_%s",
2021 void UnaryOpStub::Generate(MacroAssembler* masm) {
2022 switch (operand_type_) {
2024 GenerateTypeTransition(masm);
2027 GenerateSmiStub(masm);
2030 GenerateHeapNumberStub(masm);
2033 GenerateGenericStub(masm);
2039 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
2044 __ Push(v0, a2, a1, a0);
2046 __ TailCallExternalReference(
2047 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
2052 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
2055 GenerateSmiStubSub(masm);
2057 case Token::BIT_NOT:
2058 GenerateSmiStubBitNot(masm);
2066 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
2067 Label non_smi, slow;
2068 GenerateSmiCodeSub(masm, &non_smi, &slow);
2071 GenerateTypeTransition(masm);
2075 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
2077 GenerateSmiCodeBitNot(masm, &non_smi);
2079 GenerateTypeTransition(masm);
2083 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
2086 __ JumpIfNotSmi(a0, non_smi);
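// Negating zero or the most negative smi cannot produce a smi, so both cases
// (everything except the sign bit being zero) are sent to the slow path.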
2089 __ And(t0, a0, ~0x80000000);
2090 __ Branch(slow, eq, t0, Operand(zero_reg));
2094 __ subu(v0, zero_reg, a0);
2098 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
2100 __ JumpIfNotSmi(a0, non_smi);
2110 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
2113 GenerateHeapNumberStubSub(masm);
2115 case Token::BIT_NOT:
2116 GenerateHeapNumberStubBitNot(masm);
2124 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
2125 Label non_smi, slow, call_builtin;
2126 GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
2128 GenerateHeapNumberCodeSub(masm, &slow);
2130 GenerateTypeTransition(masm);
2131 __ bind(&call_builtin);
2132 GenerateGenericCodeFallback(masm);
2136 void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
2137 Label non_smi, slow;
2138 GenerateSmiCodeBitNot(masm, &non_smi);
2140 GenerateHeapNumberCodeBitNot(masm, &slow);
2142 GenerateTypeTransition(masm);
2146 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
2148 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2155 Label slow_allocate_heapnumber, heapnumber_allocated;
2156 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber);
2157 __ jmp(&heapnumber_allocated);
2159 __ bind(&slow_allocate_heapnumber);
2163 __ CallRuntime(Runtime::kNumberAlloc, 0);
2168 __ bind(&heapnumber_allocated);
2180 void UnaryOpStub::GenerateHeapNumberCodeBitNot(
2181 MacroAssembler* masm,
2185 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2187 __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
2192 __ Addu(a2, a1, Operand(0x40000000));
2193 __ Branch(&try_float, lt, a2, Operand(zero_reg));
2200 __ bind(&try_float);
2202 Label slow_allocate_heapnumber, heapnumber_allocated;
2204 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber);
2205 __ jmp(&heapnumber_allocated);
2207 __ bind(&slow_allocate_heapnumber);
2211 __ CallRuntime(Runtime::kNumberAlloc, 0);
2220 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible);
2224 __ bind(&heapnumber_allocated);
2230 CpuFeatures::Scope scope(FPU);
2238 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3);
2239 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2242 __ bind(&impossible);
2243 if (FLAG_debug_code) {
2244 __ stop("Incorrect assumption in bit-not stub");
2250 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
2253 GenerateGenericStubSub(masm);
2255 case Token::BIT_NOT:
2256 GenerateGenericStubBitNot(masm);
2264 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
2265 Label non_smi, slow;
2266 GenerateSmiCodeSub(masm, &non_smi, &slow);
2268 GenerateHeapNumberCodeSub(masm, &slow);
2270 GenerateGenericCodeFallback(masm);
2274 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
2275 Label non_smi, slow;
2276 GenerateSmiCodeBitNot(masm, &non_smi);
2278 GenerateHeapNumberCodeBitNot(masm, &slow);
2280 GenerateGenericCodeFallback(masm);
2284 void UnaryOpStub::GenerateGenericCodeFallback(
2285 MacroAssembler* masm) {
2292 case Token::BIT_NOT:
2301 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
2309 __ Push(a2, a1, a0);
2311 __ TailCallExternalReference(
2312 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
2319 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
2320 MacroAssembler* masm) {
2325 void BinaryOpStub::Generate(MacroAssembler* masm) {
2328 AllowStubCallsScope allow_stub_calls(masm, true);
2329 switch (operands_type_) {
2331 GenerateTypeTransition(masm);
2334 GenerateSmiStub(masm);
2337 GenerateInt32Stub(masm);
2340 GenerateHeapNumberStub(masm);
2343 GenerateOddballStub(masm);
2346 GenerateBothStringStub(masm);
2349 GenerateStringStub(masm);
2352 GenerateGeneric(masm);
2360 void BinaryOpStub::PrintName(StringStream* stream) {
2362 const char* overwrite_name;
2367 default: overwrite_name = "UnknownOverwrite"; break;
2369 stream->Add("BinaryOpStub_%s_%s_%s",
2377 void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
2379 Register right = a0;
2381 Register scratch1 = t0;
2382 Register scratch2 = t1;
2387 Label not_smi_result;
2390 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2391 __ RetOnNoOverflow(scratch1);
2395 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2396 __ RetOnNoOverflow(scratch1);
2402 __ SmiUntag(scratch1, right);
2406 __ Mult(left, scratch1);
2411 __ sra(scratch1, scratch1, 31);
2412 __ Branch(&not_smi_result, ne, scratch1, Operand(scratch2));
2415 __ Ret(ne, v0, Operand(zero_reg));
2418 __ Addu(scratch2, right, left);
2422 __ Branch(&skip, lt, scratch2, Operand(zero_reg));
2425 __ mov(v0, zero_reg);
2433 __ SmiUntag(scratch2, right);
2434 __ SmiUntag(scratch1, left);
2435 __ Div(scratch1, scratch2);
2438 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg));
2444 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg));
2446 __ Branch(&done, ne, scratch1, Operand(zero_reg));
2447 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2450 __ Addu(scratch2, scratch1, Operand(0x40000000));
2451 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2452 __ SmiTag(v0, scratch1);
2458 __ SmiUntag(scratch2, right);
2459 __ SmiUntag(scratch1, left);
2460 __ Div(scratch1, scratch2);
2464 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg));
2469 __ Branch(&done, ne, scratch2, Operand(zero_reg));
2470 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg));
2473 __ Addu(scratch1, scratch2, Operand(0x40000000));
2474 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg));
2475 __ SmiTag(v0, scratch2);
2481 __ or_(v0, left, right);
2483 case Token::BIT_AND:
2485 __ and_(v0, left, right);
2487 case Token::BIT_XOR:
2489 __ xor_(v0, left, right);
2493 __ GetLeastBitsFromSmi(scratch1, right, 5);
2494 __ srav(scratch1, left, scratch1);
2502 __ SmiUntag(scratch1, left);
2503 __ GetLeastBitsFromSmi(scratch2, right, 5);
2504 __ srlv(v0, scratch1, scratch2);
2507 __ And(scratch1, v0, Operand(0xc0000000));
2508 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg));
2515 __ SmiUntag(scratch1, left);
2516 __ GetLeastBitsFromSmi(scratch2, right, 5);
2517 __ sllv(scratch1, scratch1, scratch2);
2519 __ Addu(scratch2, scratch1, Operand(0x40000000));
2520 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2521 __ SmiTag(v0, scratch1);
2527 __ bind(&not_smi_result);
2531 void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
2534 Label* gc_required) {
2536 Register right = a0;
2537 Register scratch1 = t3;
2538 Register scratch2 = t5;
2539 Register scratch3 = t0;
2541 ASSERT(smi_operands || (not_numbers != NULL));
2542 if (smi_operands && FLAG_debug_code) {
2543 __ AbortIfNotSmi(left);
2544 __ AbortIfNotSmi(right);
2547 Register heap_number_map = t2;
2548 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2565 Register result = s0;
2566 GenerateHeapResultAllocation(
2567 masm, result, heap_number_map, scratch1, scratch2, gc_required);
2586 CpuFeatures::Scope scope(FPU);
2616 if (FLAG_debug_code) {
2617 __ stop("Unreachable code.");
2623 case Token::BIT_XOR:
2624 case Token::BIT_AND:
2629 __ SmiUntag(a3, left);
2630 __ SmiUntag(a2, right);
2652 Label result_not_a_smi;
2655 __ Or(a2, a3, Operand(a2));
2657 case Token::BIT_XOR:
2658 __ Xor(a2, a3, Operand(a2));
2660 case Token::BIT_AND:
2661 __ And(a2, a3, Operand(a2));
2665 __ GetLeastBitsFromInt32(a2, a2, 5);
2666 __ srav(a2, a3, a2);
2670 __ GetLeastBitsFromInt32(a2, a2, 5);
2671 __ srlv(a2, a3, a2);
2677 __ Branch(&result_not_a_smi, lt, a2, Operand(zero_reg));
2679 __ Branch(not_numbers, lt, a2, Operand(zero_reg));
2684 __ GetLeastBitsFromInt32(a2, a2, 5);
2685 __ sllv(a2, a3, a2);
2691 __ Addu(a3, a2, Operand(0x40000000));
2692 __ Branch(&result_not_a_smi, lt, a3, Operand(zero_reg));
2697 __ bind(&result_not_a_smi);
2698 Register result = t1;
2700 __ AllocateHeapNumber(
2701 result, scratch1, scratch2, heap_number_map, gc_required);
2703 GenerateHeapResultAllocation(
2704 masm, result, heap_number_map, scratch1, scratch2, gc_required);
2717 CpuFeatures::Scope scope(FPU);
2719 if (op_ == Token::SHR) {
2732 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
2733 __ TailCallStub(&stub);
2747 void BinaryOpStub::GenerateSmiCode(
2748 MacroAssembler* masm,
2751 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
2755 Register right = a0;
2756 Register scratch1 = t3;
2759 __ Or(scratch1, left, Operand(right));
2761 __ JumpIfNotSmi(scratch1, ¬_smis);
2764 GenerateSmiSmiOperation(masm);
2768 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) {
2769 GenerateFPOperation(masm, true, use_runtime, gc_required);
2775 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
2776 Label not_smis, call_runtime;
2781 GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS);
2785 GenerateSmiCode(masm,
2788 ALLOW_HEAPNUMBER_RESULTS);
2793 GenerateTypeTransition(masm);
2795 __ bind(&call_runtime);
2796 GenerateCallRuntime(masm);
2800 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
2804 GenerateAddStrings(masm);
2805 GenerateTypeTransition(masm);
2809 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
2818 Register right = a0;
2821 __ JumpIfSmi(left, &call_runtime);
2822 __ GetObjectType(left, a2, a2);
2826 __ JumpIfSmi(right, &call_runtime);
2827 __ GetObjectType(right, a2, a2);
2831 GenerateRegisterArgsPush(masm);
2832 __ TailCallStub(&string_add_stub);
2834 __ bind(&call_runtime);
2835 GenerateTypeTransition(masm);
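// The INT32 specialization below keeps results in int32/smi form when
// possible; the transition label is bound when a result leaves that range so
// the IC can patch itself to a more general stub via GenerateTypeTransition.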
2839 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2843 Register right = a0;
2844 Register scratch1 = t3;
2845 Register scratch2 = t5;
2846 FPURegister double_scratch = f0;
2847 FPURegister single_scratch = f6;
2849 Register heap_number_result = no_reg;
2850 Register heap_number_map = t2;
2851 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2861 __ Or(scratch1, left, right);
2862 __ JumpIfNotSmi(scratch1, &skip);
2863 GenerateSmiSmiOperation(masm);
2905 CpuFeatures::Scope scope(FPU);
2906 Label return_heap_number;
2930 Register except_flag = scratch2;
2939 __ Branch(&transition, ne, except_flag, Operand(zero_reg));
2943 __ mfc1(scratch1, single_scratch);
2944 __ Addu(scratch2, scratch1, Operand(0x40000000));
2946 __ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg));
2949 __ Branch(&not_zero, ne, scratch1, Operand(zero_reg));
2950 __ mfc1(scratch2, f11);
2952 __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg));
2956 __ SmiTag(v0, scratch1);
2962 __ bind(&return_heap_number);
2968 heap_number_result = s0;
2969 GenerateHeapResultAllocation(masm,
2975 __ mov(v0, heap_number_result);
2988 Label pop_and_call_runtime;
2991 heap_number_result = s0;
2992 GenerateHeapResultAllocation(masm,
2997 &pop_and_call_runtime);
3004 masm, op_, heap_number_result, scratch1);
3005 if (FLAG_debug_code) {
3006 __ stop("Unreachable code.");
3009 __ bind(&pop_and_call_runtime);
3011 __ Branch(&call_runtime);
3018 case Token::BIT_XOR:
3019 case Token::BIT_AND:
3023 Label return_heap_number;
3024 Register scratch3 = t1;
3051 __ Or(a2, a3, Operand(a2));
3053 case Token::BIT_XOR:
3054 __ Xor(a2, a3, Operand(a2));
3056 case Token::BIT_AND:
3057 __ And(a2, a3, Operand(a2));
3060 __ And(a2, a2, Operand(0x1f));
3061 __ srav(a2, a3, a2);
3064 __ And(a2, a2, Operand(0x1f));
3065 __ srlv(a2, a3, a2);
3073 __ Branch((result_type_ <= BinaryOpIC::INT32)
3075 : &return_heap_number,
3080 __ Branch((result_type_ <= BinaryOpIC::INT32)
3089 __ And(a2, a2, Operand(0x1f));
3090 __ sllv(a2, a3, a2);
3097 __ Addu(scratch1, a2, Operand(0x40000000));
3099 __ Branch(&return_heap_number, lt, scratch1, Operand(zero_reg));
3104 __ bind(&return_heap_number);
3105 heap_number_result = t1;
3106 GenerateHeapResultAllocation(masm,
3114 CpuFeatures::Scope scope(FPU);
3116 if (op_ != Token::SHR) {
3118 __ mtc1(a2, double_scratch);
3119 __ cvt_d_w(double_scratch, double_scratch);
3122 __ mtc1(a2, double_scratch);
3123 __ Cvt_d_uw(double_scratch, double_scratch, single_scratch);
3127 __ mov(v0, heap_number_result);
3134 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
3135 __ TailCallStub(&stub);
3148 if (transition.is_linked() ||
3149 ((op_ == Token::DIV) && (result_type_ <= BinaryOpIC::INT32))) {
3150 __ bind(&transition);
3151 GenerateTypeTransition(masm);
3154 __ bind(&call_runtime);
3155 GenerateCallRuntime(masm);
3159 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
3165 GenerateAddStrings(masm);
3170 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
3171 __ Branch(&check, ne, a1, Operand(t0));
3175 __ LoadRoot(a1, Heap::kNanValueRootIndex);
3179 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
3180 __ Branch(&done, ne, a0, Operand(t0));
3184 __ LoadRoot(a0, Heap::kNanValueRootIndex);
3188 GenerateHeapNumberStub(masm);
3192 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
3194 GenerateFPOperation(masm, false, &call_runtime, &call_runtime);
3196 __ bind(&call_runtime);
3197 GenerateCallRuntime(masm);
3201 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
3202 Label call_runtime, call_string_add_or_runtime;
3204 GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
3206 GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime);
3208 __ bind(&call_string_add_or_runtime);
3210 GenerateAddStrings(masm);
3213 __ bind(&call_runtime);
3214 GenerateCallRuntime(masm);
3218 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
3220 Label left_not_string, call_runtime;
3223 Register right = a0;
3226 __ JumpIfSmi(left, &left_not_string);
3227 __ GetObjectType(left, a2, a2);
3231 GenerateRegisterArgsPush(masm);
3232 __ TailCallStub(&string_add_left_stub);
3235 __ bind(&left_not_string);
3236 __ JumpIfSmi(right, &call_runtime);
3237 __ GetObjectType(right, a2, a2);
3241 GenerateRegisterArgsPush(masm);
3242 __ TailCallStub(&string_add_right_stub);
3245 __ bind(&call_runtime);
3249 void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
3250 GenerateRegisterArgsPush(masm);
3270 case Token::BIT_AND:
3273 case Token::BIT_XOR:
3291 void BinaryOpStub::GenerateHeapResultAllocation(
3292 MacroAssembler* masm,
3294 Register heap_number_map,
3297 Label* gc_required) {
3301 ASSERT(!result.is(a0) && !result.is(a1));
3304 Label skip_allocation, allocated;
3305 Register overwritable_operand = mode_ == OVERWRITE_LEFT ? a1 : a0;
3308 __ JumpIfNotSmi(overwritable_operand, &skip_allocation);
3310 __ AllocateHeapNumber(
3311 result, scratch1, scratch2, heap_number_map, gc_required);
3312 __ Branch(&allocated);
3313 __ bind(&skip_allocation);
3315 __ mov(result, overwritable_operand);
3316 __ bind(&allocated);
3319 __ AllocateHeapNumber(
3320 result, scratch1, scratch2, heap_number_map, gc_required);
3325 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
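// TranscendentalCacheStub::Generate (below) hashes the two words of the input
// double into the per-isolate transcendental cache and only calls the C math
// function (sin/cos/tan/log) on a miss; the CHECK_EQs further down verify the
// assumed 12-byte layout of a cache element.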
3337 Label input_not_smi;
3340 Label invalid_cache;
3341 const Register scratch0 = t5;
3342 const Register scratch1 = t3;
3343 const Register cache_entry = a0;
3344 const bool tagged = (argument_type_ == TAGGED);
3347 CpuFeatures::Scope scope(FPU);
3352 __ JumpIfNotSmi(a0, &input_not_smi);
3359 __ Move(a2, a3, f4);
3362 __ bind(&input_not_smi);
3366 Heap::kHeapNumberMapRootIndex,
3375 __ Move(a2, a3, f4);
3388 __ And(a1, a1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));
3393 __ li(cache_entry, Operand(
3394 ExternalReference::transcendental_cache_array_address(
3397 __ lw(cache_entry, MemOperand(cache_entry, type_ * sizeof(
3398 Isolate::Current()->transcendental_cache()->caches_[0])));
3401 __ Branch(&invalid_cache, eq, cache_entry, Operand(zero_reg));
3405 { TranscendentalCache::SubCache::Element test_elem[2];
3406 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
3407 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
3408 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
3409 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
3410 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
3411 CHECK_EQ(12, elem2_start - elem_start);
3412 CHECK_EQ(0, elem_in0 - elem_start);
3420 __ Addu(a1, a1, t0);
3422 __ Addu(cache_entry, cache_entry, t0);
3428 __ Branch(&calculate, ne, a2, Operand(t0));
3429 __ Branch(&calculate, ne, a3, Operand(t1));
3431 Counters* counters = masm->isolate()->counters();
3432 __ IncrementCounter(
3433 counters->transcendental_cache_hit(), 1, scratch0, scratch1);
3445 __ bind(&calculate);
3446 Counters* counters = masm->isolate()->counters();
3447 __ IncrementCounter(
3448 counters->transcendental_cache_miss(), 1, scratch0, scratch1);
3450 __ bind(&invalid_cache);
3451 __ TailCallExternalReference(ExternalReference(RuntimeFunction(),
3457 CpuFeatures::Scope scope(FPU);
3466 __ Push(cache_entry, a2, a3);
3467 GenerateCallCFunction(masm, scratch0);
3468 __ GetCFunctionDoubleResult(f4);
3472 __ Pop(cache_entry, a2, a3);
3473 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3474 __ AllocateHeapNumber(t2, scratch0, scratch1, t1, &no_update);
3482 __ mov(v0, cache_entry);
3484 __ bind(&invalid_cache);
3487 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3488 __ AllocateHeapNumber(a0, scratch0, scratch1, t1, &skip_cache);
3493 __ CallRuntime(RuntimeFunction(), 1);
3498 __ bind(&skip_cache);
3501 GenerateCallCFunction(masm, scratch0);
3502 __ GetCFunctionDoubleResult(f4);
3503 __ bind(&no_update);
3512 __ li(scratch0, Operand(4 * kPointerSize));
3514 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
3521 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
3524 __ PrepareCallCFunction(2, scratch);
3526 __ Move(a0, a1, f4);
3530 AllowExternalCallThatCantCauseGC scope(masm);
3531 Isolate* isolate = masm->isolate();
3535 ExternalReference::math_sin_double_function(isolate),
3540 ExternalReference::math_cos_double_function(isolate),
3544 __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
3549 ExternalReference::math_log_double_function(isolate),
3569 return Runtime::kAbort;
3575 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
3580 __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
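// MathPowStub (below): exponents of +0.5 and -0.5 are handled with sqrt,
// untagged integer exponents with the square-and-multiply loop at while_true,
// and the remaining cases with the C power_double_double function.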
3585 CpuFeatures::Scope fpu_scope(FPU);
3586 const Register base = a1;
3587 const Register exponent = a2;
3588 const Register heapnumbermap = t1;
3589 const Register heapnumber = v0;
3594 const FPURegister single_scratch = f8;
3595 const Register scratch = t5;
3596 const Register scratch2 = t3;
3598 Label call_runtime, done, int_exponent;
3600 Label base_is_smi, unpack_exponent;
3607 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
3609 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
3611 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
3614 __ jmp(&unpack_exponent);
3616 __ bind(&base_is_smi);
3617 __ mtc1(scratch, single_scratch);
3618 __ cvt_d_w(double_base, single_scratch);
3619 __ bind(&unpack_exponent);
3621 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
3624 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
3625 __ ldc1(double_exponent,
3627 } else if (exponent_type_ == TAGGED) {
3629 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
3631 __ ldc1(double_exponent,
3635 if (exponent_type_ != INTEGER) {
3636 Label int_exponent_convert;
3645 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
3651 Label not_plus_half;
3654 __ Move(double_scratch, 0.5);
3666 __ neg_d(double_result, double_scratch);
3669 __ add_d(double_scratch, double_base, kDoubleRegZero);
3670 __ sqrt_d(double_result, double_scratch);
3673 __ bind(&not_plus_half);
3674 __ Move(double_scratch, -0.5);
3686 __ Move(double_result, kDoubleRegZero);
3689 __ add_d(double_scratch, double_base, kDoubleRegZero);
3690 __ Move(double_result, 1);
3691 __ sqrt_d(double_scratch, double_scratch);
3692 __ div_d(double_result, double_result, double_scratch);
3698 AllowExternalCallThatCantCauseGC scope(masm);
3699 __ PrepareCallCFunction(0, 2, scratch);
3700 __ SetCallCDoubleArguments(double_base, double_exponent);
3702 ExternalReference::power_double_double_function(masm->isolate()),
3706 __ GetCFunctionDoubleResult(double_result);
3709 __ bind(&int_exponent_convert);
3710 __ mfc1(scratch, single_scratch);
3714 __ bind(&int_exponent);
3717 if (exponent_type_ == INTEGER) {
3718 __ mov(scratch, exponent);
3721 __ mov(exponent, scratch);
3724 __ mov_d(double_scratch, double_base);
3725 __ Move(double_result, 1.0);
3728 Label positive_exponent;
3729 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
3730 __ Subu(scratch, zero_reg, scratch);
3731 __ bind(&positive_exponent);
3733 Label while_true, no_carry, loop_end;
3734 __ bind(&while_true);
3736 __ And(scratch2, scratch, 1);
3738 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
3739 __ mul_d(double_result, double_result, double_scratch);
3742 __ sra(scratch, scratch, 1);
3744 __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
3745 __ mul_d(double_scratch, double_scratch, double_scratch);
3747 __ Branch(&while_true);
3751 __ Branch(&done, ge, exponent, Operand(zero_reg));
3752 __ Move(double_scratch, 1.0);
3753 __ div_d(double_result, double_scratch, double_result);
3756 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);
3760 __ mtc1(exponent, single_scratch);
3761 __ cvt_d_w(double_exponent, single_scratch);
3764 Counters* counters = masm->isolate()->counters();
3767 __ bind(&call_runtime);
3768 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
3773 __ AllocateHeapNumber(
3774 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
3775 __ sdc1(double_result,
3777 ASSERT(heapnumber.is(v0));
3778 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
3783 AllowExternalCallThatCantCauseGC scope(masm);
3784 __ PrepareCallCFunction(0, 2, scratch);
3785 __ SetCallCDoubleArguments(double_base, double_exponent);
3787 ExternalReference::power_double_double_function(masm->isolate()),
3791 __ GetCFunctionDoubleResult(double_result);
3794 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
3800 bool CEntryStub::NeedsImmovableCode() {
3806 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
3811 void CodeStub::GenerateStubsAheadOfTime() {
3819 void CodeStub::GenerateFPStubs() {
3821 Handle<Code> code = save_doubles.GetCode();
3822 code->set_is_pregenerated(true);
3824 stub.GetCode()->set_is_pregenerated(true);
3825 code->GetIsolate()->set_fp_stubs_generated(true);
3831 Handle<Code> code = stub.GetCode();
3832 code->set_is_pregenerated(true);
3836 void CEntryStub::GenerateCore(MacroAssembler* masm,
3837 Label* throw_normal_exception,
3838 Label* throw_termination_exception,
3839 Label* throw_out_of_memory_exception,
3841 bool always_allocate) {
3847 Isolate* isolate = masm->isolate();
3852 __ PrepareCallCFunction(1, 0, a1);
3853 __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1, 0);
3856 ExternalReference scope_depth =
3857 ExternalReference::heap_always_allocate_scope_depth(isolate);
3858 if (always_allocate) {
3859 __ li(a0, Operand(scope_depth));
3861 __ Addu(a1, a1, Operand(1));
3873 __ AssertStackIsAligned();
3875 __ li(a2, Operand(ExternalReference::isolate_address()));
3887 masm->bal(&find_ra);
3889 masm->bind(&find_ra);
3894 const int kNumInstructionsToJump = 5;
3895 masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);
3907 masm->InstructionsGeneratedSince(&find_ra));
3910 if (always_allocate) {
3912 __ li(a2, Operand(scope_depth));
3914 __ Subu(a3, a3, Operand(1));
3919 Label failure_returned;
3921 __ addiu(a2, v0, 1);
3932 __ LeaveExitFrame(save_doubles_, s0, true);
3936 __ bind(&failure_returned);
3939 __ Branch(&retry, eq, t0, Operand(zero_reg));
3944 throw_out_of_memory_exception,
3947 Operand(reinterpret_cast<int32_t>(out_of_memory)));
3952 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
3953 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3960 __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
3961 __ Branch(throw_termination_exception, eq, v0, Operand(t0));
3964 __ jmp(throw_normal_exception);
3994 FrameScope scope(masm, StackFrame::MANUAL);
3995 __ EnterExitFrame(save_doubles_);
4001 Label throw_normal_exception;
4002 Label throw_termination_exception;
4003 Label throw_out_of_memory_exception;
4007 &throw_normal_exception,
4008 &throw_termination_exception,
4009 &throw_out_of_memory_exception,
4015 &throw_normal_exception,
4016 &throw_termination_exception,
4017 &throw_out_of_memory_exception,
4023 __ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
4025 &throw_normal_exception,
4026 &throw_termination_exception,
4027 &throw_out_of_memory_exception,
4031 __ bind(&throw_out_of_memory_exception);
4033 Isolate* isolate = masm->isolate();
4034 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
4037 __ li(a2, Operand(external_caught));
4042 __ li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
4043 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4048 __ bind(&throw_termination_exception);
4049 __ ThrowUncatchable(v0);
4051 __ bind(&throw_normal_exception);
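// JS entry stub (below): saves the callee-saved state, records whether this
// is the outermost JS entry frame, installs a JS_ENTRY handler so exceptions
// thrown from C++ land at handler_entry, and dispatches to the construct or
// call trampoline.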
4057 Label invoke, handler_entry, exit;
4058 Isolate* isolate = masm->isolate();
4074 CpuFeatures::Scope scope(FPU);
4078 __ Move(kDoubleRegZero, 0.0);
4088 __ InitializeRootRegister();
4092 __ li(t3, Operand(-1));
4093 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4096 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
4099 __ Push(t3, t2, t1, t0);
4120 Label non_outermost_js;
4121 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
4122 __ li(t1, Operand(ExternalReference(js_entry_sp)));
4124 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
4126 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4130 __ bind(&non_outermost_js);
4131 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4138 __ bind(&handler_entry);
4139 handler_offset_ = handler_entry.pos();
4144 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4154 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
4161 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4162 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4185 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
4187 __ li(t0, Operand(construct_entry));
4189 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
4190 __ li(t0, Operand(entry));
4203 Label non_outermost_js_2;
4205 __ Branch(&non_outermost_js_2,
4208 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4209 __ li(t1, Operand(ExternalReference(js_entry_sp)));
4211 __ bind(&non_outermost_js_2);
4215 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
4223 CpuFeatures::Scope scope(FPU);
4245 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4247 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
4250 const Register object = a0;
4252 const Register function = a1;
4253 const Register prototype = t0;
4254 const Register inline_site = t5;
4255 const Register scratch = a2;
4259 Label slow, loop, is_instance, is_not_instance, not_js_object;
4261 if (!HasArgsInRegisters()) {
4267 __ JumpIfSmi(object, &not_js_object);
4268 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
4272 if (!HasCallSiteInlineCheck()) {
4274 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
4275 __ Branch(&miss, ne, function, Operand(at));
4276 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
4277 __ Branch(&miss, ne, map, Operand(at));
4278 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4279 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4285 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
4288 __ JumpIfSmi(prototype, &slow);
4289 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
4293 if (!HasCallSiteInlineCheck()) {
4294 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
4295 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
4297 ASSERT(HasArgsInRegisters());
4302 __ LoadFromSafepointRegisterSlot(scratch, t0);
4303 __ Subu(inline_site, ra, scratch);
4305 __ GetRelocatedValue(inline_site, scratch, v1);
4314 Register scratch2 = map;
4318 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
4320 __ Branch(&is_instance, eq, scratch, Operand(prototype));
4321 __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
4326 __ bind(&is_instance);
4328 if (!HasCallSiteInlineCheck()) {
4329 __ mov(v0, zero_reg);
4330 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4333 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4334 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
4336 __ PatchRelocatedValue(inline_site, scratch, v0);
4338 if (!ReturnTrueFalseObject()) {
4340 __ mov(v0, zero_reg);
4343 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4345 __ bind(&is_not_instance);
4346 if (!HasCallSiteInlineCheck()) {
4348 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4351 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4352 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
4354 __ PatchRelocatedValue(inline_site, scratch, v0);
4356 if (!ReturnTrueFalseObject()) {
4361 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4363 Label object_not_null, object_not_null_or_smi;
4364 __ bind(&not_js_object);
4367 __ JumpIfSmi(function, &slow);
4368 __ GetObjectType(function, scratch2, scratch);
4372 __ Branch(&object_not_null,
4375 Operand(masm->isolate()->factory()->null_value()));
4377 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4379 __ bind(&object_not_null);
4381 __ JumpIfNotSmi(object, &object_not_null_or_smi);
4383 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4385 __ bind(&object_not_null_or_smi);
4387 __ IsObjectJSStringType(object, scratch, &slow);
4389 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4393 if (!ReturnTrueFalseObject()) {
4394 if (HasArgsInRegisters()) {
4405 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4406 __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
4407 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4408 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4419 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
4422 const int kDisplacement =
4427 __ JumpIfNotSmi(a1, &slow);
4441 __ Branch(&slow, hs, a1, Operand(a0));
4444 __ subu(a3, a0, a1);
4446 __ Addu(a3, fp, Operand(t3));
4458 __ subu(a3, a0, a1);
4460 __ Addu(a3, a2, Operand(t3));
4468 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
4472 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
4489 __ Addu(a3, a3, Operand(t3));
4494 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
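// GenerateNewNonStrictFast (below) appears to build a non-strict (mapped)
// arguments object in one allocation: the JSObject, an optional parameter map
// pointing back at context slots, and the backing FixedArray of argument
// values (summary inferred from the map/loop code that follows).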
4498 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
4512 Label adaptor_frame, try_allocate;
4515 __ Branch(&adaptor_frame,
4522 __ b(&try_allocate);
4526 __ bind(&adaptor_frame);
4529 __ Addu(a3, a3, Operand(t6));
4537 __ Branch(&skip_min, lt, a1, Operand(a2));
4541 __ bind(&try_allocate);
4545 const int kParameterMapHeaderSize =
4548 Label param_map_size;
4551 __ mov(t5, zero_reg);
4553 __ addiu(t5, t5, kParameterMapHeaderSize);
4554 __ bind(&param_map_size);
4558 __ Addu(t5, t5, Operand(t6));
4565 __ AllocateInNewSpace(t5, v0, a3, t0, &runtime, TAG_OBJECT);
4570 const int kNormalOffset =
4572 const int kAliasedOffset =
4577 Label skip2_ne, skip2_eq;
4578 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
4582 __ Branch(&skip2_eq, eq, a1, Operand(zero_reg));
4599 const int kCalleeOffset = JSObject::kHeaderSize +
4605 const int kLengthOffset = JSObject::kHeaderSize +
4620 Label skip_parameter_map;
4630 __ LoadRoot(t2, Heap::kNonStrictArgumentsElementsMapRootIndex);
4636 __ Addu(t2, t0, Operand(t6));
4637 __ Addu(t2, t2, Operand(kParameterMapHeaderSize));
4648 Label parameters_loop, parameters_test;
4651 __ Addu(t5, t5, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4652 __ Subu(t5, t5, Operand(a1));
4653 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
4655 __ Addu(a3, t0, Operand(t6));
4656 __ Addu(a3, a3, Operand(kParameterMapHeaderSize));
4664 __ jmp(&parameters_test);
4666 __ bind(&parameters_loop);
4670 __ Addu(t6, t0, t1);
4673 __ Addu(t6, a3, t1);
4676 __ bind(&parameters_test);
4679 __ bind(&skip_parameter_map);
4684 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
4688 Label arguments_loop, arguments_test;
4692 __ Subu(t0, t0, Operand(t6));
4693 __ jmp(&arguments_test);
4695 __ bind(&arguments_loop);
4696 __ Subu(t0, t0, Operand(kPointerSize));
4699 __ Addu(t1, a3, Operand(t6));
4703 __ bind(&arguments_test);
4704 __ Branch(&arguments_loop, lt, t5, Operand(a2));
4713 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
4717 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
4722 Label adaptor_frame, try_allocate, runtime;
4725 __ Branch(&adaptor_frame,
4732 __ Branch(&try_allocate);
4735 __ bind(&adaptor_frame);
4739 __ Addu(a3, a2, Operand(at));
4746 Label add_arguments_object;
4747 __ bind(&try_allocate);
4748 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg));
4752 __ bind(&add_arguments_object);
4756 __ AllocateInNewSpace(a1,
4771 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize);
4780 __ Branch(&done, eq, a1, Operand(zero_reg));
4789 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);
4802 __ Addu(a2, a2, Operand(-kPointerSize));
4806 __ Addu(t0, t0, Operand(kPointerSize));
4807 __ Subu(a1, a1, Operand(1));
4808 __ Branch(&loop, ne, a1, Operand(zero_reg));
4816 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
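// RegExpExecStub calls the generated Irregexp code directly (the
// V8_INTERPRETED_REGEXP build tail-calls the runtime instead): it validates
// the regexp data and subject, picks the sequential string representation,
// marshals the nine arguments through DirectCEntryStub, and copies capture
// offsets into the last-match-info array.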
4820 void RegExpExecStub::Generate(MacroAssembler* masm) {
4824 #ifdef V8_INTERPRETED_REGEXP
4825 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
4826 #else // V8_INTERPRETED_REGEXP
4839 Isolate* isolate = masm->isolate();
4841 Label runtime, invoke_regexp;
4849 Register subject = s0;
4850 Register regexp_data = s1;
4851 Register last_match_info_elements = s2;
4854 ExternalReference address_of_regexp_stack_memory_address =
4855 ExternalReference::address_of_regexp_stack_memory_address(
4857 ExternalReference address_of_regexp_stack_memory_size =
4858 ExternalReference::address_of_regexp_stack_memory_size(isolate);
4859 __ li(a0, Operand(address_of_regexp_stack_memory_size));
4861 __ Branch(&runtime, eq, a0, Operand(zero_reg));
4866 __ JumpIfSmi(a0, &runtime);
4867 __ GetObjectType(a0, a1, a1);
4872 if (FLAG_debug_code) {
4875 "Unexpected type for RegExp data, FixedArray expected",
4878 __ GetObjectType(regexp_data, a0, a0);
4880 "Unexpected type for RegExp data, FixedArray expected",
4898 __ Addu(a2, a2, Operand(2));
4906 __ JumpIfSmi(subject, &runtime);
4907 __ GetObjectType(subject, a0, a0);
4910 __ Branch(&runtime, ne, a0, Operand(zero_reg));
4922 __ JumpIfNotSmi(a0, &runtime);
4923 __ Branch(&runtime, ls, a3, Operand(a0));
4930 __ JumpIfSmi(a0, &runtime);
4931 __ GetObjectType(a0, a1, a1);
4934 __ lw(last_match_info_elements,
4937 __ Branch(&runtime, ne, a0, Operand(
4938 isolate->factory()->fixed_array_map()));
4945 __ Branch(&runtime, gt, a2, Operand(at));
4948 __ mov(t0, zero_reg);
4963 __ Branch(&seq_string, eq, a1, Operand(zero_reg));
4975 Label cons_string, external_string, check_encoding;
4986 __ Branch(&runtime, ne, at, Operand(zero_reg));
4993 __ jmp(&check_encoding);
4995 __ bind(&cons_string);
4997 __ LoadRoot(a1, Heap::kEmptyStringRootIndex);
4998 __ Branch(&runtime, ne, a0, Operand(a1));
5001 __ bind(&check_encoding);
5006 __ Branch(&external_string, ne, at, Operand(zero_reg));
5008 __ bind(&seq_string);
5020 __ Movz(t9, t1, a0);
5025 __ JumpIfSmi(t9, &runtime);
5042 __ IncrementCounter(isolate->counters()->regexp_entry_native(),
5046 const int kRegExpExecuteArguments = 9;
5047 const int kParameterRegisters = 4;
5048 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
5066 __ li(a0, Operand(ExternalReference::isolate_address()));
5070 __ li(a0, Operand(1));
5074 __ li(a0, Operand(address_of_regexp_stack_memory_address));
5076 __ li(a2, Operand(address_of_regexp_stack_memory_size));
5078 __ addu(a0, a0, a2);
5083 __ mov(a0, zero_reg);
5088 ExternalReference::address_of_static_offsets_vector(isolate)));
5094 __ Xor(a3, a3, Operand(1));
5099 __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
5104 __ sllv(t1, t0, a3);
5105 __ addu(t0, t2, t1);
5106 __ sllv(t1, a1, a3);
5107 __ addu(a2, t0, t1);
5111 __ sllv(t1, t2, a3);
5112 __ addu(a3, t0, t1);
5117 __ mov(a0, subject);
5121 DirectCEntryStub stub;
5122 stub.GenerateCall(masm, t9);
5134 __ Branch(&success, eq, v0, Operand(1));
5145 __ li(a1, Operand(isolate->factory()->the_hole_value()));
5146 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
5149 __ Branch(&runtime, eq, v0, Operand(a1));
5154 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
5155 Label termination_exception;
5156 __ Branch(&termination_exception, eq, v0, Operand(a0));
5160 __ bind(&termination_exception);
5161 __ ThrowUncatchable(v0);
5165 __ li(v0, Operand(isolate->factory()->null_value()));
5175 __ Addu(a1, a1, Operand(2));
5187 __ mov(a2, subject);
5188 __ RecordWriteField(last_match_info_elements,
5197 __ RecordWriteField(last_match_info_elements,
5205 ExternalReference address_of_static_offsets_vector =
5206 ExternalReference::address_of_static_offsets_vector(isolate);
5207 __ li(a2, Operand(address_of_static_offsets_vector));
5211 Label next_capture, done;
5215 last_match_info_elements,
5217 __ bind(&next_capture);
5218 __ Subu(a1, a1, Operand(1));
5219 __ Branch(&done, lt, a1, Operand(zero_reg));
5222 __ addiu(a2, a2, kPointerSize);
5227 __ addiu(a0, a0, kPointerSize);
5237 __ bind(&external_string);
5240 if (FLAG_debug_code) {
5245 "external string expected, but not found",
5256 __ jmp(&seq_string);
5260 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
5261 #endif // V8_INTERPRETED_REGEXP
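// RegExpConstructResultStub builds the JSArray returned from a regexp match.
// For lengths up to kMaxInlineLength it allocates the array and its
// FixedArray elements in new space and fills the elements with the-hole;
// larger or non-smi lengths go to Runtime::kRegExpConstructResult.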
5265 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
5266 const int kMaxInlineLength = 100;
5272 __ JumpIfNotSmi(a1, &slowcase);
5283 __ Addu(a2, t1, Operand(objects_size));
5284 __ AllocateInNewSpace(
5301 __ li(t0, Operand(masm->isolate()->factory()->empty_fixed_array()));
5322 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map()));
5328 __ li(a2, Operand(masm->isolate()->factory()->the_hole_value()));
5337 __ addu(t1, t1, a3);
5339 __ Branch(&done, ge, a3, Operand(t1));
5342 __ addiu(a3, a3, kPointerSize);
5348 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
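// GenerateRecordCallTarget caches the called function (a1) in the call
// target cell: a cell still holding the uninitialized sentinel (the hole) is
// filled with the function, a cell that already holds this function or the
// megamorphic sentinel (undefined) is left unchanged, and any other value
// makes the cell megamorphic.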
5352 static void GenerateRecordCallTarget(MacroAssembler* masm) {
5361 masm->isolate()->heap()->undefined_value());
5363 masm->isolate()->heap()->the_hole_value());
5370 __ Branch(&done, eq, a3, Operand(a1));
5371 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5372 __ Branch(&done, eq, a3, Operand(at));
5376 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5387 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5397 Label slow, non_function;
5402 if (ReceiverMightBeImplicit()) {
5408 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5409 __ Branch(&call, ne, t0, Operand(at));
5419 __ JumpIfSmi(a1, &non_function);
5421 __ GetObjectType(a1, a3, a3);
5424 if (RecordCallTarget()) {
5425 GenerateRecordCallTarget(masm);
5430 ParameterCount actual(argc_);
5432 if (ReceiverMightBeImplicit()) {
5433 Label call_as_function;
5434 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5435 __ Branch(&call_as_function, eq, t0, Operand(at));
5436 __ InvokeFunction(a1,
5441 __ bind(&call_as_function);
5443 __ InvokeFunction(a1,
5451 if (RecordCallTarget()) {
5456 masm->isolate()->heap()->undefined_value());
5457 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5465 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
5468 Handle<Code> adaptor =
5469 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
5470 __ Jump(adaptor, RelocInfo::CODE_TARGET);
5475 __ bind(&non_function);
5477 __ li(a0, Operand(argc_));
5478 __ mov(a2, zero_reg);
5479 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
5481 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
5482 RelocInfo::CODE_TARGET);
5490 Label slow, non_function_call;
5493 __ JumpIfSmi(a1, &non_function_call);
5495 __ GetObjectType(a1, a3, a3);
5498 if (RecordCallTarget()) {
5499 GenerateRecordCallTarget(masm);
5514 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
5517 __ bind(&non_function_call);
5518 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
5523 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
5524 RelocInfo::CODE_TARGET);
5530 void CompareStub::PrintName(StringStream* stream) {
5532 (lhs_.is(a1) && rhs_.is(a0)));
5533 const char* cc_name;
5535 case lt: cc_name = "LT"; break;
5536 case gt: cc_name = "GT"; break;
5537 case le: cc_name = "LE"; break;
5538 case ge: cc_name = "GE"; break;
5539 case eq: cc_name = "EQ"; break;
5540 case ne: cc_name = "NE"; break;
5541 default: cc_name = "UnknownCondition"; break;
5543 bool is_equality = cc_ == eq || cc_ == ne;
5544 stream->Add("CompareStub_%s", cc_name);
5545 stream->Add(lhs_.is(a0) ? "_a0" : "_a1");
5546 stream->Add(rhs_.is(a0) ? "_a0" : "_a1");
5547 if (strict_ && is_equality) stream->Add("_STRICT");
5548 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
5549 if (!include_number_compare_) stream->Add("_NO_NUMBER");
5550 if (!include_smi_compare_) stream->Add("_NO_SMI");
5554 int CompareStub::MinorKey() {
5556 ASSERT(static_cast<unsigned>(cc_) < (1 << 14));
5558 (lhs_.is(a1) && rhs_.is(a0)));
5559 return ConditionField::encode(static_cast<unsigned>(cc_))
5560 | RegisterField::encode(lhs_.is(a0))
5561 | StrictField::encode(strict_)
5562 | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
5563 | IncludeSmiCompareField::encode(include_smi_compare_);
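// StringCharCodeAtGenerator: the fast path below handles a string receiver
// with an in-range smi index; the slow path (GenerateSlow further down)
// converts other index values via the runtime and finally falls back to
// Runtime::kStringCharCodeAt.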
5571 Label got_char_code;
5572 Label sliced_string;
5579 __ JumpIfSmi(object_, receiver_not_string_);
5586 __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg));
5589 __ JumpIfNotSmi(index_, &index_not_smi_);
5591 __ bind(&got_smi_index_);
5595 __ Branch(index_out_of_range_, ls, t0, Operand(index_));
5611 MacroAssembler* masm,
5612 const RuntimeCallHelper& call_helper) {
5613 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
5616 __ bind(&index_not_smi_);
5620 Heap::kHeapNumberMapRootIndex,
5623 call_helper.BeforeCall(masm);
5625 __ Push(object_, index_);
5627 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
5631 __ CallRuntime(Runtime::kNumberToSmi, 1);
5637 __ Move(index_, v0);
5642 call_helper.AfterCall(masm);
5644 __ JumpIfNotSmi(index_, index_out_of_range_);
5646 __ Branch(&got_smi_index_);
5651 __ bind(&call_runtime_);
5652 call_helper.BeforeCall(masm);
5654 __ Push(object_, index_);
5655 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
5657 __ Move(result_, v0);
5659 call_helper.AfterCall(masm);
5662 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
5682 __ Branch(&slow_case_, ne, t0, Operand(zero_reg));
5684 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
5688 __ Addu(result_, result_, t0);
5690 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
5691 __ Branch(&slow_case_, eq, result_, Operand(t0));
5697 MacroAssembler* masm,
5698 const RuntimeCallHelper& call_helper) {
5699 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5701 __ bind(&slow_case_);
5702 call_helper.BeforeCall(masm);
5704 __ CallRuntime(Runtime::kCharFromCode, 1);
5705 __ Move(result_, v0);
5707 call_helper.AfterCall(masm);
5710 __ Abort("Unexpected fallthrough from CharFromCode slow case");
5724 MacroAssembler* masm,
5725 const RuntimeCallHelper& call_helper) {
5726 char_code_at_generator_.GenerateSlow(masm, call_helper);
5727 char_from_code_generator_.GenerateSlow(masm, call_helper);
5742 __ addu(count, count, count);
5744 __ Branch(&done, eq, count, Operand(zero_reg));
5745 __ addu(count, dest, count);
5749 __ addiu(src, src, 1);
5751 __ addiu(dest, dest, 1);
5752 __ Branch(&loop, lt, dest, Operand(count));
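// GenerateCopyCharactersLong copies count characters from src to dest.  It
// first aligns dest (unless DEST_ALWAYS_ALIGNED), then copies in word-sized
// chunks while at least kReadAlignment bytes remain, and finishes the tail
// with a simple byte loop.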
5758 enum CopyCharactersFlags {
5760 DEST_ALWAYS_ALIGNED = 2
5774 bool ascii = (flags & COPY_ASCII) != 0;
5775 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
5777 if (dest_always_aligned && FLAG_debug_code) {
5782 "Destination of copy not aligned.",
5787 const int kReadAlignment = 4;
5788 const int kReadAlignmentMask = kReadAlignment - 1;
5798 __ addu(count, count, count);
5800 __ Branch(&done, eq, count, Operand(zero_reg));
5804 __ Subu(scratch1, count, Operand(8));
5805 __ Addu(count, dest, Operand(count));
5806 Register limit = count;
5807 __ Branch(&byte_loop, lt, scratch1, Operand(zero_reg));
5809 if (!dest_always_aligned) {
5811 __ And(scratch4, dest, Operand(kReadAlignmentMask));
5813 __ Branch(&dest_aligned, eq, scratch4, Operand(zero_reg));
5815 __ bind(&aligned_loop);
5817 __ addiu(src, src, 1);
5819 __ addiu(dest, dest, 1);
5820 __ addiu(scratch4, scratch4, 1);
5821 __ Branch(&aligned_loop, le, scratch4, Operand(kReadAlignmentMask));
5822 __ bind(&dest_aligned);
5827 __ And(scratch4, src, Operand(kReadAlignmentMask));
5828 __ Branch(&simple_loop, eq, scratch4, Operand(zero_reg));
5837 __ Addu(src, src, Operand(kReadAlignment));
5840 __ Addu(dest, dest, Operand(kReadAlignment));
5841 __ Subu(scratch2, limit, dest);
5842 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
5845 __ Branch(&byte_loop);
5850 __ bind(&simple_loop);
5855 __ Addu(src, src, Operand(kReadAlignment));
5857 __ Addu(dest, dest, Operand(kReadAlignment));
5858 __ Subu(scratch2, limit, dest);
5859 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
5863 __ bind(&byte_loop);
5865 __ Branch(&done, ge, dest, Operand(limit));
5867 __ addiu(src, src, 1);
5869 __ addiu(dest, dest, 1);
5870 __ Branch(&byte_loop);
5886 Register scratch = scratch3;
5890 Label not_array_index;
5891 __ Subu(scratch, c1, Operand(static_cast<int>('0')));
5892 __ Branch(&not_array_index,
5895 Operand(static_cast<int>('9' - '0')));
5896 __ Subu(scratch, c2, Operand(static_cast<int>('0')));
5903 __ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0')));
5904 __ Or(c1, c1, scratch1);
5907 not_found, Uless_equal, scratch, Operand(static_cast<int>('9' - '0')));
5909 __ bind(&not_array_index);
5911 Register hash = scratch1;
5917 Register chars = c1;
5919 __ Or(chars, chars, scratch);
5926 Register symbol_table = c2;
5929 Register undefined = scratch4;
5930 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
5933 Register mask = scratch2;
5935 __ sra(mask, mask, 1);
5936 __ Addu(mask, mask, -1);
5939 Register first_symbol_table_element = symbol_table;
5940 __ Addu(first_symbol_table_element, symbol_table,
5953 const int kProbes = 4;
5954 Label found_in_symbol_table;
5955 Label next_probe[kProbes];
5956 Register candidate = scratch5;
5957 for (int i = 0; i < kProbes; i++) {
5960 __ Addu(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
5962 __ mov(candidate, hash);
5965 __ And(candidate, candidate, Operand(mask));
5970 __ Addu(scratch, scratch, first_symbol_table_element);
5975 __ GetObjectType(candidate, scratch, scratch);
5978 __ Branch(not_found, eq, undefined, Operand(candidate));
5980 if (FLAG_debug_code) {
5981 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
5982 __ Assert(eq, "oddball in symbol table is not undefined or the hole",
5983 scratch, Operand(candidate));
5985 __ jmp(&next_probe[i]);
5987 __ bind(&is_string);
5992 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
6001 __ Branch(&found_in_symbol_table, eq, chars, Operand(scratch));
6002 __ bind(&next_probe[i]);
6009 Register result = candidate;
6010 __ bind(&found_in_symbol_table);
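// The three hash helpers below implement V8's running string hash.  As a
// rough scalar sketch of what the assembly computes (assuming 32-bit
// arithmetic; the exact mask and zero-hash fixup constants live in lines
// elided from this listing):
//
//   hash = seed + c;          // GenerateHashInit
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   hash += c;                // GenerateHashAddCharacter, once per character
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   hash += hash << 3;        // GenerateHashGetHash
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   hash &= <hash bit mask>;  // a non-zero value is substituted if hash == 0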
6017 Register character) {
6019 __ LoadRoot(hash, Heap::kHashSeedRootIndex);
6022 __ addu(hash, hash, character);
6023 __ sll(at, hash, 10);
6024 __ addu(hash, hash, at);
6026 __ srl(at, hash, 6);
6027 __ xor_(hash, hash, at);
6033 Register character) {
6035 __ addu(hash, hash, character);
6037 __ sll(at, hash, 10);
6038 __ addu(hash, hash, at);
6040 __ srl(at, hash, 6);
6041 __ xor_(hash, hash, at);
6048 __ sll(at, hash, 3);
6049 __ addu(hash, hash, at);
6051 __ srl(at, hash, 11);
6052 __ xor_(hash, hash, at);
6054 __ sll(at, hash, 15);
6055 __ addu(hash, hash, at);
6058 __ and_(hash, hash, at);
6062 __ Movz(hash, at, hash);
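// SubStringStub expects the string and the smi-tagged from/to indices on the
// stack.  It returns the string itself when the range covers the whole
// string, materializes a SlicedString when FLAG_string_slices is enabled and
// the result is long enough, and otherwise allocates a fresh sequential
// string and copies the characters; anything else tail calls
// Runtime::kSubString.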
6066 void SubStringStub::Generate(MacroAssembler* masm) {
6093 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
6094 __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
6097 __ Branch(&runtime, lt, a3, Operand(zero_reg));
6099 __ Branch(&runtime, gt, a3, Operand(a2));
6100 __ Subu(a2, a2, a3);
6104 __ JumpIfSmi(v0, &runtime);
6109 __ Branch(&runtime, ne, t0, Operand(zero_reg));
6118 __ Branch(&return_v0, eq, a2, Operand(t0));
6120 __ Branch(&runtime, hi, a2, Operand(t0));
6129 Label underlying_unpacked, sliced_string, seq_or_external_string;
6137 __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
6140 __ LoadRoot(t0, Heap::kEmptyStringRootIndex);
6141 __ Branch(&runtime, ne, t1, Operand(t0));
6146 __ jmp(&underlying_unpacked);
6148 __ bind(&sliced_string);
6153 __ Addu(a3, a3, t0);
6157 __ jmp(&underlying_unpacked);
6159 __ bind(&seq_or_external_string);
6163 __ bind(&underlying_unpacked);
6165 if (FLAG_string_slices) {
6178 Label two_byte_slice, set_slice_header;
6182 __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
6183 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
6184 __ jmp(&set_slice_header);
6185 __ bind(&two_byte_slice);
6186 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
6187 __ bind(&set_slice_header);
6193 __ bind(&copy_routine);
6200 Label two_byte_sequential, sequential_string, allocate_result;
6204 __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
6210 __ Branch(&runtime, ne, t0, Operand(zero_reg));
6213 __ jmp(&allocate_result);
6215 __ bind(&sequential_string);
6220 __ bind(&allocate_result);
6224 __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
6227 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
6230 __ Addu(t1, t1, a3);
6241 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
6245 __ bind(&two_byte_sequential);
6246 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
6251 __ Addu(t1, t1, t0);
6261 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
6263 __ bind(&return_v0);
6264 Counters* counters = masm->isolate()->counters();
6265 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
6270 __ TailCallRuntime(Runtime::kSubString, 3, 1);
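// The helpers below compare flat ASCII strings.  GenerateFlatAsciiStringEquals
// only answers equal/not-equal (lengths first, then a character loop), while
// GenerateCompareFlatAsciiStrings produces a full ordering by comparing the
// common prefix and then the length difference; both leave the result in v0.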
6279 Register scratch3) {
6280 Register length = scratch1;
6283 Label strings_not_equal, check_zero_length;
6286 __ Branch(&check_zero_length, eq, length, Operand(scratch2));
6287 __ bind(&strings_not_equal);
6292 Label compare_chars;
6293 __ bind(&check_zero_length);
6295 __ Branch(&compare_chars, ne, length, Operand(zero_reg));
6300 __ bind(&compare_chars);
6302 GenerateAsciiCharsCompareLoop(masm,
6303 left, right, length, scratch2, scratch3, v0,
6304 &strings_not_equal);
6318 Register scratch4) {
6319 Label result_not_equal, compare_lengths;
6323 __ Subu(scratch3, scratch1, Operand(scratch2));
6324 Register length_delta = scratch3;
6325 __ slt(scratch4, scratch2, scratch1);
6326 __ Movn(scratch1, scratch2, scratch4);
6327 Register min_length = scratch1;
6329 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
6332 GenerateAsciiCharsCompareLoop(masm,
6333 left, right, min_length, scratch2, scratch4, v0,
6337 __ bind(&compare_lengths);
6340 __ mov(scratch2, length_delta);
6341 __ mov(scratch4, zero_reg);
6342 __ mov(v0, zero_reg);
6344 __ bind(&result_not_equal);
6348 __ Branch(&ret, eq, scratch2, Operand(scratch4));
6350 __ Branch(&ret, gt, scratch2, Operand(scratch4));
6357 void StringCompareStub::GenerateAsciiCharsCompareLoop(
6358 MacroAssembler* masm,
6365 Label* chars_not_equal) {
6369 __ SmiUntag(length);
6370 __ Addu(scratch1, length,
6372 __ Addu(left, left, Operand(scratch1));
6373 __ Addu(right, right, Operand(scratch1));
6374 __ Subu(length, zero_reg, length);
6375 Register index = length;
6381 __ Addu(scratch3, left, index);
6383 __ Addu(scratch3, right, index);
6385 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
6386 __ Addu(index, index, 1);
6387 __ Branch(&loop, ne, index, Operand(zero_reg));
6391 void StringCompareStub::Generate(MacroAssembler* masm) {
6394 Counters* counters = masm->isolate()->counters();
6403 __ Branch(&not_same, ne, a0, Operand(a1));
6407 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
6413 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
6416 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
6417 __ Addu(sp, sp, Operand(2 * kPointerSize));
6421 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
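// StringAddStub fast paths: if either argument is the empty string the other
// is returned unchanged; two one-character strings are first looked up in
// the symbol table and otherwise copied into a fresh two-character string;
// longer results are built either as a ConsString or, in the flat case, by
// allocating a new sequential string and copying both pieces.  Everything
// else goes to Runtime::kStringAdd or the string-add builtin.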
6425 void StringAddStub::Generate(MacroAssembler* masm) {
6426 Label call_runtime, call_builtin;
6429 Counters* counters = masm->isolate()->counters();
6441 __ JumpIfEitherSmi(a0, a1, &call_runtime);
6449 __ Or(t4, t0, Operand(t1));
6451 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
6457 GenerateConvertArgument(
6458 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
6459 builtin_id = Builtins::STRING_ADD_RIGHT;
6461 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
6462 GenerateConvertArgument(
6463 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
6464 builtin_id = Builtins::STRING_ADD_LEFT;
6474 Label strings_not_empty;
6483 __ Movz(v0, a1, a2);
6484 __ slt(t4, zero_reg, a2);
6485 __ slt(t5, zero_reg, a3);
6486 __ and_(t4, t4, t5);
6487 __ Branch(&strings_not_empty, ne, t4, Operand(zero_reg));
6489 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6492 __ bind(&strings_not_empty);
6507 Label string_add_flat_result, longer_than_two;
6510 __ Addu(t2, a2, Operand(a3));
6513 __ Branch(&longer_than_two, ne, t2, Operand(2));
6522 __ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3,
6531 Label make_two_character_string;
6533 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
6534 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6537 __ bind(&make_two_character_string);
6543 __ li(t2, Operand(2));
6544 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
6546 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6549 __ bind(&longer_than_two);
6566 Label non_ascii, allocated, ascii_data;
6569 __ And(t4, t0, Operand(t1));
6571 __ Branch(&non_ascii, eq, t4, Operand(zero_reg));
6574 __ bind(&ascii_data);
6575 __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime);
6576 __ bind(&allocated);
6580 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6583 __ bind(&non_ascii);
6590 __ and_(at, at, t1);
6591 __ Branch(&ascii_data, ne, at, Operand(zero_reg));
6593 __ xor_(t0, t0, t1);
6599 __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
6600 __ Branch(&allocated);
6613 Label first_prepared, second_prepared;
6614 __ bind(&string_add_flat_result);
6622 __ Xor(t3, t0, Operand(t1));
6624 __ Branch(&call_runtime, ne, t3, Operand(zero_reg));
6630 Label skip_first_add;
6631 __ Branch(&skip_first_add, ne, t4, Operand(zero_reg));
6634 __ bind(&skip_first_add);
6638 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
6640 __ bind(&first_prepared);
6645 Label skip_second_add;
6646 __ Branch(&skip_second_add, ne, t4, Operand(zero_reg));
6649 __ bind(&skip_second_add);
6653 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
6655 __ bind(&second_prepared);
6657 Label non_ascii_string_add_flat_result;
6666 __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg));
6668 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
6680 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6683 __ bind(&non_ascii_string_add_flat_result);
6684 __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime);
6696 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
6700 __ bind(&call_runtime);
6701 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
6703 if (call_builtin.is_linked()) {
6704 __ bind(&call_builtin);
6710 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
6719 Label not_string, done;
6720 __ JumpIfSmi(arg, &not_string);
6721 __ GetObjectType(arg, scratch1, scratch1);
6726 __ bind(&not_string);
6736 __ mov(arg, scratch1);
6741 __ bind(&not_cached);
6742 __ JumpIfSmi(arg, slow);
6743 __ GetObjectType(arg, scratch1, scratch2);
6747 __ And(scratch2, scratch2, scratch4);
6748 __ Branch(slow, ne, scratch2, Operand(scratch4));
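// The ICCompareStub generators below each implement one state of the compare
// IC (smis, heap numbers, symbols, strings, objects, known objects).  Each
// one bails out to GenerateMiss when the operands do not match the state it
// was specialized for.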
6756 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
6760 __ JumpIfNotSmi(a2, &miss);
6762 if (GetCondition() == eq) {
6764 __ Subu(v0, a0, a1);
6769 __ Subu(v0, a1, a0);
6778 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
6782 Label unordered, maybe_undefined1, maybe_undefined2;
6784 __ And(a2, a1, Operand(a0));
6785 __ JumpIfSmi(a2, &generic_stub);
6787 __ GetObjectType(a0, a2, a2);
6789 __ GetObjectType(a1, a2, a2);
6795 CpuFeatures::Scope scope(FPU);
6804 Label fpu_eq, fpu_lt;
6806 __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
6820 __ li(v0, Operand(LESS));
6824 __ bind(&unordered);
6827 __ bind(&generic_stub);
6828 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
6830 __ bind(&maybe_undefined1);
6832 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
6833 __ Branch(&miss, ne, a0, Operand(at));
6834 __ GetObjectType(a1, a2, a2);
6839 __ bind(&maybe_undefined2);
6841 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
6842 __ Branch(&unordered, eq, a1, Operand(at));
6850 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
6856 Register right = a0;
6861 __ JumpIfEitherSmi(left, right, &miss);
6869 __ And(tmp1, tmp1, Operand(tmp2));
6871 __ Branch(&miss, eq, tmp1, Operand(zero_reg));
6879 __ Ret(ne, left, Operand(right));
6888 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
6896 Register right = a0;
6904 __ JumpIfEitherSmi(left, right, &miss);
6913 __ Or(tmp3, tmp1, tmp2);
6915 __ Branch(&miss, ne, tmp5, Operand(zero_reg));
6918 Label left_ne_right;
6921 __ Branch(&left_ne_right, ne, left, Operand(right));
6923 __ mov(v0, zero_reg);
6924 __ bind(&left_ne_right);
6933 __ And(tmp3, tmp1, Operand(tmp2));
6936 __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg));
6942 __ bind(&is_symbol);
6947 __ JumpIfBothInstanceTypesAreNotSequentialAscii(
6948 tmp1, tmp2, tmp3, tmp4, &runtime);
6953 masm, left, right, tmp1, tmp2, tmp3);
6956 masm, left, right, tmp1, tmp2, tmp3, tmp4);
6961 __ Push(left, right);
6963 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
6965 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
6973 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
6976 __ And(a2, a1, Operand(a0));
6977 __ JumpIfSmi(a2, &miss);
6979 __ GetObjectType(a0, a2, a2);
6981 __ GetObjectType(a1, a2, a2);
6986 __ subu(v0, a0, a1);
6993 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
6996 __ JumpIfSmi(a2, &miss);
6999 __ Branch(&miss, ne, a2, Operand(known_map_));
7000 __ Branch(&miss, ne, a3, Operand(known_map_));
7003 __ subu(v0, a0, a1);
7009 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
7012 ExternalReference miss =
7013 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
7019 __ addiu(sp, sp, -kPointerSize);
7042 __ Assert(ne, "Received invalid return address.", t0,
7043 Operand(reinterpret_cast<uint32_t>(kZapValue)));
7050 ExternalReference function) {
7051 __ li(t9, Operand(function));
7058 __ Move(t9, target);
7059 __ AssertStackIsAligned();
7069 masm->bal(&find_ra);
7071 masm->bind(&find_ra);
7073 const int kNumInstructionsToJump = 6;
7074 masm->addiu(ra, ra, kNumInstructionsToJump * kPointerSize);
7079 Operand(reinterpret_cast<intptr_t>(GetCode().location()),
7080 RelocInfo::CODE_TARGET),
7085 ASSERT_EQ(kNumInstructionsToJump, masm->InstructionsGeneratedSince(&find_ra));
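// StringDictionaryLookupStub::GenerateNegativeLookup probes the property
// dictionary a fixed number of times inline to prove that |name| is absent:
// hitting an undefined entry means the name is not there (jump to done),
// hitting the name itself means it is present (jump to miss), and anything
// inconclusive falls back to the full probing stub.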
7093 Register properties,
7094 Handle<String> name,
7095 Register scratch0) {
7101 for (int i = 0; i < kInlinedProbes; i++) {
7104 Register index = scratch0;
7107 __ Subu(index, index, Operand(1));
7108 __ And(index, index, Operand(
7109 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
7113 __ sll(at, index, 1);
7114 __ Addu(index, index, at);
7116 Register entity_name = scratch0;
7119 Register tmp = properties;
7120 __ sll(scratch0, index, 1);
7121 __ Addu(tmp, properties, scratch0);
7124 ASSERT(!tmp.is(entity_name));
7125 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
7126 __ Branch(done, eq, entity_name, Operand(tmp));
7128 if (i != kInlinedProbes - 1) {
7130 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
7133 __ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
7136 __ Branch(&the_hole, eq, entity_name, Operand(tmp));
7143 __ Branch(miss, eq, scratch0, Operand(zero_reg));
7153 const int spill_mask =
7154 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
7155 a2.bit() | a1.bit() | a0.bit() | v0.bit());
7157 __ MultiPush(spill_mask);
7159 __ li(a1, Operand(Handle<String>(name)));
7163 __ MultiPop(spill_mask);
7165 __ Branch(done, eq, at, Operand(zero_reg));
7166 __ Branch(miss, ne, at, Operand(zero_reg));
7180 Register scratch2) {
7181 ASSERT(!elements.is(scratch1));
7182 ASSERT(!elements.is(scratch2));
7183 ASSERT(!name.is(scratch1));
7184 ASSERT(!name.is(scratch2));
7187 if (FLAG_debug_code) __ AbortIfNotString(name);
7192 __ Subu(scratch1, scratch1, Operand(1));
7197 for (int i = 0; i < kInlinedProbes; i++) {
7204 ASSERT(StringDictionary::GetProbeOffset(i) <
7206 __ Addu(scratch2, scratch2, Operand(
7210 __ And(scratch2, scratch1, scratch2);
7216 __ sll(at, scratch2, 1);
7217 __ Addu(scratch2, scratch2, at);
7220 __ sll(at, scratch2, 2);
7221 __ Addu(scratch2, elements, at);
7223 __ Branch(done, eq, name, Operand(at));
7226 const int spill_mask =
7227 (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
7228 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
7229 ~(scratch1.bit() | scratch2.bit());
7231 __ MultiPush(spill_mask);
7233 ASSERT(!elements.is(a1));
7235 __ Move(a0, elements);
7237 __ Move(a0, elements);
7242 __ mov(scratch2, a2);
7244 __ MultiPop(spill_mask);
7246 __ Branch(done, ne, at, Operand(zero_reg));
7247 __ Branch(miss, eq, at, Operand(zero_reg));
7263 Register result = v0;
7264 Register dictionary = a0;
7266 Register index = a2;
7269 Register undefined = t1;
7270 Register entry_key = t2;
7272 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
7276 __ Subu(mask, mask, Operand(1));
7280 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
7282 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
7289 ASSERT(StringDictionary::GetProbeOffset(i) <
7291 __ Addu(index, hash, Operand(
7294 __ mov(index, hash);
7297 __ And(index, mask, index);
7303 __ sll(index, index, 1);
7304 __ Addu(index, index, at);
7308 __ sll(index, index, 2);
7309 __ Addu(index, index, dictionary);
7313 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
7316 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
7324 __ Branch(&maybe_in_dictionary, eq, result, Operand(zero_reg));
7328 __ bind(&maybe_in_dictionary);
7334 __ mov(result, zero_reg);
7337 __ bind(&in_dictionary);
7341 __ bind(&not_in_dictionary);
7343 __ mov(result, zero_reg);
7347 struct AheadOfTimeWriteBarrierStubList {
7348 Register object, value, address;
7352 #define REG(Name) { kRegister_ ## Name ## _Code }
7354 static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
7391 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
7392 !entry->object.is(no_reg);
7394 if (object_.is(entry->object) &&
7395 value_.is(entry->value) &&
7396 address_.is(entry->address) &&
7397 remembered_set_action_ == entry->action &&
7413 stub1.GetCode()->set_is_pregenerated(true);
7418 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
7419 !entry->object.is(no_reg);
7426 stub.GetCode()->set_is_pregenerated(true);
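// RecordWriteStub::Generate starts with two always-taken branches that jump
// to the incremental-marking paths below; the incremental-marking code is
// expected to patch these initial instructions so that, while incremental
// marking is off, execution falls through to the plain remembered-set update
// instead.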
7435 void RecordWriteStub::Generate(MacroAssembler* masm) {
7436 Label skip_to_incremental_noncompacting;
7437 Label skip_to_incremental_compacting;
7445 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
7447 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
7451 __ RememberedSetHelper(object_,
7459 __ bind(&skip_to_incremental_noncompacting);
7462 __ bind(&skip_to_incremental_compacting);
7473 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
7477 Label dont_need_remembered_set;
7479 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7480 __ JumpIfNotInNewSpace(regs_.scratch0(),
7482 &dont_need_remembered_set);
7484 __ CheckPageFlag(regs_.object(),
7488 &dont_need_remembered_set);
7492 CheckNeedsToInformIncrementalMarker(
7493 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
7494 InformIncrementalMarker(masm, mode);
7495 regs_.Restore(masm);
7496 __ RememberedSetHelper(object_,
7502 __ bind(&dont_need_remembered_set);
7505 CheckNeedsToInformIncrementalMarker(
7506 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
7507 InformIncrementalMarker(masm, mode);
7508 regs_.Restore(masm);
7513 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
7514 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
7515 int argument_count = 3;
7516 __ PrepareCallCFunction(argument_count, regs_.scratch0());
7518 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
7519 ASSERT(!address.is(regs_.object()));
7521 __ Move(address, regs_.address());
7522 __ Move(a0, regs_.object());
7524 __ Move(a1, address);
7529 __ li(a2, Operand(ExternalReference::isolate_address()));
7531 AllowExternalCallThatCantCauseGC scope(masm);
7534 ExternalReference::incremental_evacuation_record_write_function(
7540 ExternalReference::incremental_marking_record_write_function(
7544 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
7548 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
7549 MacroAssembler* masm,
7550 OnNoNeedToInformIncrementalMarker on_no_need,
7553 Label need_incremental;
7554 Label need_incremental_pop_scratch;
7558 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
7560 regs_.Restore(masm);
7561 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7562 __ RememberedSetHelper(object_,
7574 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7577 Label ensure_not_white;
7579 __ CheckPageFlag(regs_.scratch0(),
7585 __ CheckPageFlag(regs_.object(),
7591 __ bind(&ensure_not_white);
7596 __ Push(regs_.object(), regs_.address());
7597 __ EnsureNotWhite(regs_.scratch0(),
7601 &need_incremental_pop_scratch);
7602 __ Pop(regs_.object(), regs_.address());
7604 regs_.Restore(masm);
7605 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7606 __ RememberedSetHelper(object_,
7615 __ bind(&need_incremental_pop_scratch);
7616 __ Pop(regs_.object(), regs_.address());
7618 __ bind(&need_incremental);
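// StoreArrayLiteralElementStub stores a value into an array literal's
// elements, dispatching on the elements kind: fast (tagged) and fast-smi
// elements are written in place (with a write barrier where needed), double
// elements go through StoreNumberToDoubleElements, and anything that would
// need a transition tail calls Runtime::kStoreArrayLiteralElement.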
7624 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
7634 Label double_elements;
7636 Label slow_elements;
7637 Label fast_elements;
7639 __ CheckFastElements(a2, t1, &double_elements);
7641 __ JumpIfSmi(a0, &smi_element);
7642 __ CheckFastSmiElements(a2, t1, &fast_elements);
7646 __ bind(&slow_elements);
7648 __ Push(a1, a3, a0);
7652 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
7655 __ bind(&fast_elements);
7658 __ Addu(t2, t1, t2);
7669 __ bind(&smi_element);
7672 __ Addu(t2, t1, t2);
7678 __ bind(&double_elements);
7680 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,
7691 #endif // V8_TARGET_ARCH_MIPS
bool FLAG_enable_slow_asserts
static const int kResourceDataOffset
static const int kCallerFPOffset
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static const int kLengthOffset
static const int kBitFieldOffset
const int kCArgsSlotsSize
void GenerateFast(MacroAssembler *masm)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
const intptr_t kSmiTagMask
static const int kCodeOffset
static const int kEvacuationCandidateMask
#define CHECK_EQ(expected, value)
static void LoadNumberAsInt32Double(MacroAssembler *masm, Register object, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register heap_number_map, Register scratch1, Register scratch2, SwVfpRegister single_scratch, Label *not_int32)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static const int kCodeEntryOffset
static const int kMaxAsciiCharCode
static const int kPrototypeOrInitialMapOffset
#define COMPARE(asm_, compare_string)
static int SlotOffset(int index)
RecordWriteStub(Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode)
static const int kStaticOffsetsVectorSize
static const int kArgumentsObjectSize
static void GenerateFixedRegStubsAheadOfTime()
const uint32_t kTwoByteStringTag
const int kFailureTypeTagSize
static const uint32_t kExponentMask
static const char * GetName(TypeInfo type_info)
static Failure * InternalError()
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, bool ascii)
static const char * Name(Value tok)
static Smi * FromInt(int value)
void Generate(MacroAssembler *masm)
static void DoubleIs32BitInteger(MacroAssembler *masm, Register src1, Register src2, Register dst, Register scratch, Label *not_int32)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
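// Each DEFINE_* entry above becomes a --name / --noname command-line switch
// and a FLAG_name global inside V8; flags.cc (re-entered at the line marker
// above) builds its Flag metadata table by re-including flag-definitions.h
// under different FLAG settings, which is what the bare "#define FLAG"
// separators are remnants of.
//
// Minimal sketch (not part of the V8 sources) of how an embedder of this
// vintage could set some of these flags programmatically; the main()
// scaffold is illustrative only, but SetFlagsFromCommandLine,
// SetFlagsFromString, and the flag names used are real.
#include <string.h>
#include "v8.h"

int main(int argc, char* argv[]) {
  // Let V8 consume any --flag arguments it recognizes, e.g.
  //   ./embedder --trace_gc --max_executable_size=128 script.js
  v8::V8::SetFlagsFromCommandLine(&argc, argv, true /* remove_flags */);

  // Flags can also be set from a raw string; boolean flags accept a "no"
  // prefix, so --nocollect_maps switches collect_maps off.
  const char flags[] = "--trace_gc --nocollect_maps";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(strlen(flags)));

  // ... create a context, run scripts, etc. ...
  return 0;
}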
static const int kDataOffset
static const int kGlobalReceiverOffset
const int kNumCalleeSavedFPU
static void GenerateCopyCharactersLong(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Register scratch5, int flags)
void Generate(MacroAssembler *masm)
static Failure * OutOfMemoryException()
static void LoadNumberAsInt32(MacroAssembler *masm, Register object, Register dst, Register heap_number_map, Register scratch1, Register scratch2, Register scratch3, DwVfpRegister double_scratch, Label *not_int32)
static void GenerateHashGetHash(MacroAssembler *masm, Register hash)
static const int kExponentBias
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static bool IsSupported(CpuFeature f)
static Failure * Exception()
void Generate(MacroAssembler *masm)
virtual bool IsPregenerated()
void Generate(MacroAssembler *masm)
static const int kCallerSPOffset
bool CanBeUndetectable() const
#define ASSERT(condition)
static void LoadOperands(MacroAssembler *masm, FloatingPointHelper::Destination destination, Register heap_number_map, Register scratch1, Register scratch2, Label *not_number)
const RegList kJSCallerSaved
WriteInt32ToHeapNumberStub(Register the_int, Register the_heap_number, Register scratch)
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static void GenerateCompareFlatAsciiStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const char * GetName(TypeInfo type_info)
const uint32_t kStringRepresentationMask
MemOperand GlobalObjectOperand()
static const int kEntrySize
const intptr_t kObjectAlignmentMask
static const int kGlobalContextOffset
MemOperand ContextOperand(Register context, int index)
static const int kContextOffset
const uint32_t kAsciiDataHintTag
const uint32_t kShortExternalStringMask
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< String > name, Register scratch0)
static const int kLastSubjectOffset
static const int kZeroHash
const RegList kCalleeSavedFPU
void Generate(MacroAssembler *masm)
static const int kHashFieldOffset
static const int kLastCaptureCountOffset
const RegList kCallerSavedFPU
static const int kFirstOffset
static const int kMinLength
StringDictionaryLookupStub(LookupMode mode)
const uint32_t kNotStringTag
static const int kParentOffset
static const int kNonMantissaBitsInTopWord
static const int kLiteralsOffset
DwVfpRegister DoubleRegister
static const int kArgumentsObjectSizeStrict
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0)
static const int kLengthOffset
static const int kCapacityOffset
const uint32_t kIsSymbolMask
static const int kExponentShift
const intptr_t kFailureTagMask
static const int kValueOffset
const int kFailureTagSize
static void GenerateFlatAsciiStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
static const int kIrregexpCaptureCountOffset
static const int kInputOffset
static bool IsBitOp(Value op)
const uint32_t kIsIndirectStringMask
const bool IsMipsSoftFloatABI
void Generate(MacroAssembler *masm)
static void LoadSmis(MacroAssembler *masm, Destination destination, Register scratch1, Register scratch2)
virtual bool IsPregenerated()
static void CallCCodeForDoubleOperation(MacroAssembler *masm, Token::Value op, Register heap_number_result, Register scratch)
static const int kStringWrapperSafeForDefaultValueOf
const RegList kCalleeSaved
const uint32_t kAsciiDataHintMask
static void ConvertNumberToInt32(MacroAssembler *masm, Register object, Register dst, Register heap_number_map, Register scratch1, Register scratch2, Register scratch3, DwVfpRegister double_scratch, Label *not_int32)
void Generate(MacroAssembler *masm)
static const int kPropertiesOffset
static void PatchBranchIntoNop(MacroAssembler *masm, int pos)
static const int kMinLength
const uint32_t kShortExternalStringTag
static void GenerateHashAddCharacter(MacroAssembler *masm, Register hash, Register character)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const int kHeaderSize
static const int kNextFunctionLinkOffset
void Generate(MacroAssembler *masm)
static int SizeFor(int length)
static const int kElementsOffset
const uint32_t kStringTag
static bool IsEqualityOp(Value op)
static const int kOffsetOffset
friend class BlockTrampolinePoolScope
void Generate(MacroAssembler *masm)
static const int kLengthOffset
static int SizeFor(int length)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
virtual void Generate(MacroAssembler *masm)
static const int kLastMatchOverhead
static const int kHeaderSize
const intptr_t kPointerAlignmentMask
void Generate(MacroAssembler *masm)
void GenerateCall(MacroAssembler *masm, ExternalReference function)
static const int kMapOffset
static const int kMantissaBitsInTopWord
bool is(Register reg) const
static const int kSkipEvacuationSlotsRecordingMask
const uint32_t kIsNotStringMask
const int kNumCalleeSaved
static void GenerateFixedRegStubsAheadOfTime()
const uint32_t kSlicedNotConsMask
static const int kLengthOffset
static void ConvertIntToDouble(MacroAssembler *masm, Register int_scratch, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register scratch2, SwVfpRegister single_scratch)
void Generate(MacroAssembler *masm)
void Generate(MacroAssembler *masm)
static const int kSecondOffset
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kCallerFPOffset
static const int kArgumentsLengthIndex
MemOperand FieldMemOperand(Register object, int offset)
const intptr_t kObjectAlignment
static const int kContextOffset
static const int kFunctionOffset
static const int kFirstCaptureOffset
static const uint32_t kHashBitMask
static const uint32_t kSignMask
static const int kLastInputOffset
static const int kHeaderSize
void GenerateBody(MacroAssembler *masm, bool is_construct)
static const int kDataAsciiCodeOffset
#define ASSERT_EQ(v1, v2)
// (extraction residue: only the help strings of earlier flag definitions survived here —
//  harmony language features, smi-only/double element arrays, crankshaft/hydrogen
//  optimization and tracing options, self-optimization and interrupt budgets, code-comment
//  emission, and the CPU feature toggles for SSE3, CMOV, SAHF, VFP3/ARMv7, and MIPS FPU;
//  the DEFINE_* macro names themselves are not recoverable.)
static void GenerateAheadOfTime()
static const int kArgumentsCalleeIndex
static const int kIsUndetectable
static const int kHeaderSize
void Generate(MacroAssembler *masm)
void GenerateFast(MacroAssembler *masm)
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kInstrSize
static const int kDataTagOffset
static const int kPrototypeOffset
static void GenerateFixedRegStubsAheadOfTime()
static const int kElementsStartOffset
static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler *masm, Register c1, Register c2, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Register scratch5, Label *not_found)
static const int kMaxLength
static const int kValueOffset
bool Contains(Type type) const
const uint32_t kSymbolTag
const uint32_t kAsciiStringTag
static const int kConstructStubOffset
static const int kExponentBits
static const int kHashShift
static const int kSharedFunctionInfoOffset
void Generate(MacroAssembler *masm)
static const int kBitField2Offset
void Generate(MacroAssembler *masm)
CEntryStub(int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
void check(i::Vector< const char > string)
static const int kExponentOffset
static const int kValueOffset
static const int kDataUC16CodeOffset
void Generate(MacroAssembler *masm)
StoreBufferOverflowStub(SaveFPRegsMode save_fp)
virtual bool IsPregenerated()
static void GenerateHashInit(MacroAssembler *masm, Register hash, Register character)
static bool IsOrderedRelationalCompareOp(Value op)
const uint32_t kStringEncodingMask
static const int kInstanceTypeOffset
static const int kIndexOffset
void Generate(MacroAssembler *masm)
static const int kMantissaOffset
void Generate(MacroAssembler *masm)
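// The declarations above are the vocabulary the MIPS code stubs are written
// in: per-class field offsets (kMapOffset, kValueOffset, kLengthOffset, ...)
// combined with helpers such as FieldMemOperand and ContextOperand to address
// fields of tagged heap objects.  A minimal sketch of that pattern follows;
// this helper is hypothetical, not a function from the V8 sources, but the
// offsets, root index, and MacroAssembler calls it uses are the real ones.
static void EmitLoadHeapNumberValue(MacroAssembler* masm,
                                    Register object,
                                    Register scratch,
                                    FPURegister dst,
                                    Label* not_heap_number) {
  // FieldMemOperand subtracts the heap-object tag, so this loads the map
  // word of |object|.
  __ lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(not_heap_number, ne, scratch, Operand(at));
  // The untagged double payload of a HeapNumber lives at kValueOffset.
  __ ldc1(dst, FieldMemOperand(object, HeapNumber::kValueOffset));
}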