#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cond,
                                          bool never_nan_nan);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm,
                                          Condition cond);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);
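// Checks whether |operand| is a heap number by comparing its map against the
// heap-number map root; falls through on success and jumps to
// |not_a_heap_number| otherwise.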
static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
                                   Register scratch1, Register scratch2,
                                   Label* not_a_heap_number) {
  __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, scratch2);
  __ b(ne, not_a_heap_number);


  Label check_heap_number, call_builtin;
  __ JumpIfNotSmi(r0, &check_heap_number);

  __ bind(&check_heap_number);
  EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin);

  __ bind(&call_builtin);


  __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);

  __ LoadRoot(r4, Heap::kFalseValueRootIndex);

  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);


  __ LoadRoot(r1, Heap::kFunctionContextMapRootIndex);

  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {

  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);


  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);

  Label after_sentinel;
  __ JumpIfNotSmi(r3, &after_sentinel);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmp(r3, Operand::Zero());
    __ Assert(eq, message);
  }

  __ bind(&after_sentinel);

  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {

  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);


static void GenerateFastCloneShallowArrayCommon(
    MacroAssembler* masm,
  int elements_size = 0;
      : FixedArray::SizeFor(length);

  __ AllocateInNewSpace(size,

  __ add(r2, r0, Operand(JSArray::kSize));


  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case);

  Label double_elements, check_fast_elements;
  __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
  __ b(ne, &check_fast_elements);
  GenerateFastCloneShallowArrayCommon(masm, 0,

  __ bind(&check_fast_elements);
  __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &double_elements);
  GenerateFastCloneShallowArrayCommon(masm, length_,

  __ bind(&double_elements);

  if (FLAG_debug_code) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
      message = "Expected (writable) fixed double array";
      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;

    __ CompareRoot(r3, expected_map_index);
    __ Assert(eq, message);
  }

  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);

  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);


  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case);

  __ b(ne, &slow_case);

  __ add(sp, sp, Operand(4 * kPointerSize));

  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);


class ConvertToDoubleStub : public CodeStub {
  ConvertToDoubleStub(Register result_reg_1,
                      Register result_reg_2,
                      Register source_reg,
                      Register scratch_reg)
      : result1_(result_reg_1),
        result2_(result_reg_2),
        source_(source_reg),
        zeros_(scratch_reg) { }

  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
    return result1_.code() +
           (result2_.code() << 4) +
           (source_.code() << 8) +
           (zeros_.code() << 12);

  void Generate(MacroAssembler* masm);
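// Builds an IEEE-754 double from the untagged integer in source_: the values
// 0 and 1 are handled as special cases up front, and the remaining values are
// normalized with a count-leading-zeros shift before the sign, exponent and
// mantissa words are assembled in result1_/result2_.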
void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
  Register exponent = result1_;
  Register mantissa = result2_;

  __ cmp(source_, Operand(1));
  __ b(gt, &not_special);

  const uint32_t exponent_word_for_1 =
      HeapNumber::kExponentBias << HeapNumber::kExponentShift;
  __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);

  __ bind(&not_special);

  __ CountLeadingZeros(zeros_, source_, mantissa);

  __ add(mantissa, mantissa, Operand(fudge));
         Operand(mantissa, LSL, HeapNumber::kExponentShift));

  __ add(zeros_, zeros_, Operand(1));

  __ mov(source_, Operand(source_, LSL, zeros_));


    CpuFeatures::Scope scope(VFP3);

    __ mov(scratch1, Operand(r0));
    ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2);
    __ Call(stub1.GetCode());

    __ mov(scratch1, Operand(r1));
    ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2);
    __ Call(stub2.GetCode());
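// Loads the two operands of a binary operation (r0 and r1) either into the
// VFP registers d7/d6 or into the core register pairs r2/r3 and r0/r1,
// depending on |destination|.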
    MacroAssembler* masm,
    Register heap_number_map,

  LoadNumber(masm, destination,
             r0, d7, r2, r3, heap_number_map, scratch1, scratch2, slow);

  LoadNumber(masm, destination,
             r1, d6, r0, r1, heap_number_map, scratch1, scratch2, slow);


void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
                                     Destination destination,
                                     Register heap_number_map,

  if (FLAG_debug_code) {
    __ AbortIfNotRootValue(heap_number_map,
                           Heap::kHeapNumberMapRootIndex,
                           "HeapNumberMap register clobbered.");
  }

  __ UntagAndJumpIfSmi(scratch1, object, &is_smi);

  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);

    CpuFeatures::Scope scope(VFP3);

    CpuFeatures::Scope scope(VFP3);
    __ vmov(dst.high(), scratch1);
    __ vcvt_f64_s32(dst, dst.high());

    __ vmov(dst1, dst2, dst);

    __ mov(scratch1, Operand(object));
    ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
    __ Call(stub.GetCode());


    Register heap_number_map,
    DwVfpRegister double_scratch,

  if (FLAG_debug_code) {
    __ AbortIfNotRootValue(heap_number_map,
                           Heap::kHeapNumberMapRootIndex,
                           "HeapNumberMap register clobbered.");
  }

  Label not_in_int32_range;

  __ UntagAndJumpIfSmi(dst, object, &done);

  __ cmp(scratch1, heap_number_map);
  __ b(ne, not_number);
  __ ConvertToInt32(object,
                    &not_in_int32_range);

  __ bind(&not_in_int32_range);

  __ EmitOutOfInt32RangeTruncate(dst,


    Register int_scratch,
    Destination destination,
    DwVfpRegister double_dst,
    SwVfpRegister single_scratch) {
  ASSERT(!int_scratch.is(scratch2));
  ASSERT(!int_scratch.is(dst1));
  ASSERT(!int_scratch.is(dst2));

    CpuFeatures::Scope scope(VFP3);
    __ vmov(single_scratch, int_scratch);
    __ vcvt_f64_s32(double_dst, single_scratch);

    __ vmov(dst1, dst2, double_dst);

    Label fewer_than_20_useful_bits;

    __ cmp(int_scratch, Operand::Zero());
    __ mov(dst2, int_scratch);
    __ mov(dst1, int_scratch);

    __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);

    __ CountLeadingZeros(dst1, int_scratch, scratch2);
    __ rsb(dst1, dst1, Operand(31));

    __ Bfi(dst2, scratch2, scratch2,

    __ mov(scratch2, Operand(1));
    __ bic(int_scratch, int_scratch, Operand(scratch2, LSL, dst1));

    __ b(mi, &fewer_than_20_useful_bits);

    __ orr(dst2, dst2, Operand(int_scratch, LSR, scratch2));
    __ rsb(scratch2, scratch2, Operand(32));
    __ mov(dst1, Operand(int_scratch, LSL, scratch2));

    __ bind(&fewer_than_20_useful_bits);

    __ mov(scratch2, Operand(int_scratch, LSL, scratch2));
    __ orr(dst2, dst2, scratch2);

    __ mov(dst1, Operand::Zero());
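// Loads a number (smi or heap number) as a 32-bit integer expressed as a
// double, jumping to |not_int32| when the value cannot be represented exactly
// as a 32-bit integer.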
    Destination destination,
    DwVfpRegister double_dst,
    Register heap_number_map,
    SwVfpRegister single_scratch,
  ASSERT(!scratch1.is(object) && !scratch2.is(object));
  ASSERT(!scratch1.is(scratch2));
  ASSERT(!heap_number_map.is(object) &&
         !heap_number_map.is(scratch1) &&
         !heap_number_map.is(scratch2));

  Label done, obj_is_not_smi;

  __ JumpIfNotSmi(object, &obj_is_not_smi);
  __ SmiUntag(scratch1, object);
                     scratch2, single_scratch);

  __ bind(&obj_is_not_smi);
  if (FLAG_debug_code) {
    __ AbortIfNotRootValue(heap_number_map,
                           Heap::kHeapNumberMapRootIndex,
                           "HeapNumberMap register clobbered.");
  }
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);

    CpuFeatures::Scope scope(VFP3);

    __ vmov(dst1, dst2, double_dst);

    ASSERT(!scratch1.is(object) && !scratch2.is(object));

    __ orr(scratch1, scratch1, Operand(dst2));
    __ cmp(scratch1, Operand::Zero());


    Register heap_number_map,
    DwVfpRegister double_scratch,
  ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
  ASSERT(!scratch1.is(scratch2) &&
         !scratch1.is(scratch3) &&
         !scratch2.is(scratch3));

  __ UntagAndJumpIfSmi(dst, object, &done);

  if (FLAG_debug_code) {
    __ AbortIfNotRootValue(heap_number_map,
                           Heap::kHeapNumberMapRootIndex,
                           "HeapNumberMap register clobbered.");
  }
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);

    CpuFeatures::Scope scope(VFP3);
    SwVfpRegister single_scratch = double_scratch.low();

    __ vmov(dst, single_scratch);

    __ orr(dst, scratch2, Operand(dst));
    __ cmp(dst, Operand::Zero());

    __ mov(dst, Operand(dst, LSR, scratch3));

    __ rsb(scratch3, scratch3, Operand(32));
    __ orr(dst, dst, Operand(scratch2, LSL, scratch3));


           HeapNumber::kExponentShift,
  __ sub(tmp, scratch, Operand(src1, LSR, 31));
  __ cmp(tmp, Operand(30));

  __ tst(src2, Operand(0x3fffff));

  __ rsb(scratch, scratch, Operand(32));
  __ mov(src2, Operand(1));
  __ mov(src1, Operand(src2, LSL, scratch));
  __ sub(src1, src1, Operand(1));
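// Calls the C implementation of a double-precision binary operation, passing
// the operands in VFP or core registers depending on the EABI variant in use,
// and writes the result into |heap_number_result|.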
    MacroAssembler* masm,
    Register heap_number_result,

  __ PrepareCallCFunction(0, 2, scratch);
  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP3);

    AllowExternalCallThatCantCauseGC scope(masm);
        ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);

  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP3);

  __ mov(r0, Operand(heap_number_result));


  if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
  if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {

  stub1.GetCode()->set_is_pregenerated(true);
  stub2.GetCode()->set_is_pregenerated(true);


void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;

  __ cmp(the_int_, Operand(0x80000000u));
  __ b(eq, &max_negative_int);

  uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  __ mov(scratch_, Operand(non_smi_exponent));

  ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);

  __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));

  __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));

  __ bind(&max_negative_int);
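// Handles the case where the comparison operands are the same object.
// Identical operands compare equal unless one of them is NaN, which only
// needs checking when the operands may be heap numbers.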
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cond,
                                          bool never_nan_nan) {
  Label not_identical;
  Label heap_number, return_equal;

  __ b(ne, &not_identical);

  if (cond != eq || !never_nan_nan) {

    if (cond == lt || cond == gt) {

      __ b(eq, &heap_number);

      if (cond == le || cond == ge) {

        __ b(ne, &return_equal);
        __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

        __ b(ne, &return_equal);

  __ bind(&return_equal);

  } else if (cond == gt) {

  if (cond != eq || !never_nan_nan) {

    if (cond != lt && cond != gt) {
      __ bind(&heap_number);

      __ cmp(r3, Operand(-1));
      __ b(ne, &return_equal);

  __ bind(&not_identical);


static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ JumpIfSmi(rhs, &rhs_is_smi);

    CpuFeatures::Scope scope(VFP3);
    __ SmiToDoubleVFPRegister(lhs, d7, r7, s15);

    __ mov(r7, Operand(lhs));
    ConvertToDoubleStub stub1(r3, r2, r7, r6);
    __ Call(stub1.GetCode());

  __ jmp(lhs_not_nan);

  __ bind(&rhs_is_smi);

    CpuFeatures::Scope scope(VFP3);

    __ SmiToDoubleVFPRegister(rhs, d6, r7, s13);

    __ mov(r7, Operand(rhs));
    ConvertToDoubleStub stub2(r1, r0, r7, r6);
    __ Call(stub2.GetCode());
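// Checks whether either operand is NaN by testing for the all-ones exponent
// together with a non-zero mantissa, branching to one_is_nan or
// neither_is_nan accordingly.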
void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) {
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;
  Label one_is_nan, neither_is_nan;

           HeapNumber::kExponentShift,
  __ cmp(r4, Operand(-1));
  __ b(ne, lhs_not_nan);

  __ b(ne, &one_is_nan);

  __ b(ne, &one_is_nan);

  __ bind(lhs_not_nan);

           HeapNumber::kExponentShift,
  __ cmp(r4, Operand(-1));
  __ b(ne, &neither_is_nan);

  __ b(ne, &one_is_nan);

  __ b(eq, &neither_is_nan);

  __ bind(&one_is_nan);

  if (cond == lt || cond == le) {

  __ bind(&neither_is_nan);


static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm,
                                          Condition cond) {
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;

  __ cmp(rhs_mantissa, Operand(lhs_mantissa));
  __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne);

  __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC);


  __ PrepareCallCFunction(0, 2, r5);
  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP3);

    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),


static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  Label first_non_object;
  __ b(lt, &first_non_object);

  Label return_not_equal;
  __ bind(&return_not_equal);

  __ bind(&first_non_object);

  __ b(eq, &return_not_equal);

  __ b(ge, &return_not_equal);

  __ b(eq, &return_not_equal);

  __ b(ne, &return_not_equal);


static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ b(ne, not_heap_numbers);

    CpuFeatures::Scope scope(VFP3);

  __ jmp(both_loaded_as_doubles);


static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
                                         Register lhs,
                                         Register rhs,
                                         Label* possible_strings,
                                         Label* not_both_strings) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ b(ne, &object_test);

  __ b(eq, possible_strings);

  __ b(ge, not_both_strings);

  __ b(eq, possible_strings);

  __ bind(&object_test);
  __ b(lt, not_both_strings);

  __ b(lt, not_both_strings);
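// Probes the number-string cache: smis are hashed directly, heap numbers by
// xor-ing the two halves of their double value, and the masked hash indexes
// into the cache's (number, string) pairs.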
  Register number_string_cache = result;
  Register mask = scratch3;

  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  __ sub(mask, mask, Operand(1));

  Isolate* isolate = masm->isolate();

  Label load_result_from_cache;
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);

      CpuFeatures::Scope scope(VFP3);

                  Heap::kHeapNumberMapRootIndex,

      __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
      __ eor(scratch1, scratch1, Operand(scratch2));
      __ and_(scratch1, scratch1, Operand(mask));

          number_string_cache,

      Register probe = mask;

      __ JumpIfSmi(probe, not_found);

      __ VFPCompareAndSetFlags(d0, d1);
      __ b(ne, not_found);
      __ b(&load_result_from_cache);

  Register scratch = scratch1;
  __ and_(scratch, mask, Operand(object, ASR, 1));

      number_string_cache,

  Register probe = mask;
  __ cmp(object, probe);
  __ b(ne, not_found);

  __ bind(&load_result_from_cache);

  __ IncrementCounter(isolate->counters()->number_to_string_native(),


void NumberToStringStub::Generate(MacroAssembler* masm) {

  __ add(sp, sp, Operand(1 * kPointerSize));

  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);


  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  if (include_smi_compare_) {
    Label not_two_smis, smi_done;
    __ JumpIfNotSmi(r2, &not_two_smis);

    __ bind(&not_two_smis);
  } else if (FLAG_debug_code) {
    __ Assert(ne, "CompareStub: unexpected smi operands.");

  EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);

  __ and_(r2, lhs_, Operand(rhs_));
  __ JumpIfNotSmi(r2, &not_smis);

  EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_);

  __ bind(&both_loaded_as_doubles);

  Isolate* isolate = masm->isolate();

    __ bind(&lhs_not_nan);
    CpuFeatures::Scope scope(VFP3);

    __ VFPCompareAndSetFlags(d7, d6);

    if (cc_ == lt || cc_ == le) {

    EmitNanCheck(masm, &lhs_not_nan, cc_);

    EmitTwoNonNanDoubleComparison(masm, cc_);

    EmitStrictTwoHeapObjectCompare(masm, lhs_, rhs_);

  Label check_for_symbols;
  Label flat_string_check;

  EmitCheckForTwoHeapNumbers(masm,
                             &both_loaded_as_doubles,
                             &flat_string_check);

  __ bind(&check_for_symbols);

  if (cc_ == eq && !strict_) {

    EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow);

  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);

  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);

  __ Push(lhs_, rhs_);

    native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;

    if (cc_ == lt || cc_ == le) {


    CpuFeatures::Scope scope(VFP3);

  const Register map = r9.is(tos_) ? r7 : r9;

  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

    __ JumpIfSmi(tos_, &patch);

    Label not_heap_number;
    __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
    __ b(ne, &not_heap_number);

    __ VFPCompareAndSetFlags(d1, 0.0);

    __ bind(&not_heap_number);

  GenerateTypeTransition(masm);


void ToBooleanStub::CheckOddball(MacroAssembler* masm,

    __ LoadRoot(ip, value);


void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ mov(r3, Operand(tos_));

  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),


    CpuFeatures::Scope scope(VFP3);

  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = r1;

    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
    __ mov(r0, Operand(ExternalReference::isolate_address()));
        ExternalReference::store_buffer_overflow_function(masm->isolate()),

    CpuFeatures::Scope scope(VFP3);


void UnaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name = NULL;

  stream->Add("UnaryOpStub_%s_%s_%s",


void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateHeapNumberStub(masm);
      GenerateGenericStub(masm);


void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);


void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
      GenerateSmiStubSub(masm);
    case Token::BIT_NOT:
      GenerateSmiStubBitNot(masm);


void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
  __ JumpIfNotSmi(r0, non_smi);


void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
  __ JumpIfNotSmi(r0, non_smi);


void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
      GenerateHeapNumberStubSub(masm);
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);


void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);

    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);

      __ CallRuntime(Runtime::kNumberAlloc, 0);

    __ bind(&heapnumber_allocated);


void UnaryOpStub::GenerateHeapNumberCodeBitNot(
    MacroAssembler* masm, Label* slow) {
  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);

  __ b(mi, &try_float);

  __ bind(&try_float);

    Label slow_allocate_heapnumber, heapnumber_allocated;

    __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);

      __ CallRuntime(Runtime::kNumberAlloc, 0);

    __ bind(&heapnumber_allocated);

      CpuFeatures::Scope scope(VFP3);

      WriteInt32ToHeapNumberStub stub(r1, r0, r2);
      __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&impossible);
  if (FLAG_debug_code) {
    __ stop("Incorrect assumption in bit-not stub");


void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
      GenerateGenericStubSub(masm);
    case Token::BIT_NOT:
      GenerateGenericStubBitNot(masm);


void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
    case Token::BIT_NOT:


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),


void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
    MacroAssembler* masm) {


void BinaryOpStub::Generate(MacroAssembler* masm) {
  AllowStubCallsScope allow_stub_calls(masm, true);

  switch (operands_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateInt32Stub(masm);
      GenerateHeapNumberStub(masm);
      GenerateOddballStub(masm);
      GenerateBothStringStub(masm);
      GenerateStringStub(masm);
      GenerateGeneric(masm);


void BinaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name;
    default: overwrite_name = "UnknownOverwrite"; break;

  stream->Add("BinaryOpStub_%s_%s_%s",


void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;

  Label not_smi_result;
      __ add(right, left, Operand(right), SetCC);
      __ sub(right, right, Operand(left));
      __ sub(right, left, Operand(right), SetCC);
      __ sub(right, left, Operand(right));

      __ SmiUntag(ip, right);

      __ smull(scratch1, scratch2, left, ip);

      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &not_smi_result);

      __ cmp(scratch1, Operand(0));

      __ add(scratch2, right, Operand(left), SetCC);

      __ JumpIfNotPowerOfTwoOrZero(right, scratch1, &not_smi_result);

      __ orr(scratch2, scratch1, Operand(0x80000000u));
      __ tst(left, scratch2);
      __ b(ne, &not_smi_result);

      __ CountLeadingZeros(scratch1, scratch1, scratch2);
      __ rsb(scratch1, scratch1, Operand(31));
      __ mov(right, Operand(left, LSR, scratch1));

      __ orr(scratch1, left, Operand(right));
      __ b(ne, &not_smi_result);

      __ JumpIfNotPowerOfTwoOrZero(right, scratch1, &not_smi_result);

      __ and_(right, left, Operand(scratch1));

      __ orr(right, left, Operand(right));

    case Token::BIT_AND:
      __ and_(right, left, Operand(right));

    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));

      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));

      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));

      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &not_smi_result);

      __ SmiTag(right, scratch1);

      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));

      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
      __ b(mi, &not_smi_result);
      __ SmiTag(right, scratch1);

  __ bind(&not_smi_result);
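// Emits the floating-point path of a binary operation: arithmetic operations
// load both operands as doubles and write the result into a freshly allocated
// (or overwritten) heap number, while bitwise operations first convert the
// operands to 32-bit integers.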
void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
                                       Label* gc_required) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;
  Register scratch3 = r4;

  ASSERT(smi_operands || (not_numbers != NULL));
  if (smi_operands && FLAG_debug_code) {
    __ AbortIfNotSmi(left);
    __ AbortIfNotSmi(right);

  Register heap_number_map = r6;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

      Register result = r5;
      GenerateHeapResultAllocation(
          masm, result, heap_number_map, scratch1, scratch2, gc_required);

        CpuFeatures::Scope scope(VFP3);

      if (FLAG_debug_code) {
        __ stop("Unreachable code.");

    case Token::BIT_XOR:
    case Token::BIT_AND:

      __ SmiUntag(r3, left);
      __ SmiUntag(r2, right);

      Label result_not_a_smi;
        case Token::BIT_XOR:
        case Token::BIT_AND:

          __ GetLeastBitsFromInt32(r2, r2, 5);

          __ GetLeastBitsFromInt32(r2, r2, 5);

            __ b(mi, &result_not_a_smi);
            __ b(mi, not_numbers);

          __ GetLeastBitsFromInt32(r2, r2, 5);

      __ b(mi, &result_not_a_smi);

      __ bind(&result_not_a_smi);
      Register result = r5;
        __ AllocateHeapNumber(
            result, scratch1, scratch2, heap_number_map, gc_required);
        GenerateHeapResultAllocation(
            masm, result, heap_number_map, scratch1, scratch2, gc_required);

        CpuFeatures::Scope scope(VFP3);

        if (op_ == Token::SHR) {

        WriteInt32ToHeapNumberStub stub(r2, r0, r3);
        __ TailCallStub(&stub);


void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  Register right = r0;
  Register scratch1 = r7;

  __ orr(scratch1, left, Operand(right));
  __ JumpIfNotSmi(scratch1, &not_smis);

  GenerateSmiSmiOperation(masm);

  if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) {
    GenerateFPOperation(masm, true, use_runtime, gc_required);


void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label not_smis, call_runtime;

    GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS);

    GenerateSmiCode(masm,
                    ALLOW_HEAPNUMBER_RESULTS);

  GenerateTypeTransition(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);


void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  GenerateAddStrings(masm);
  GenerateTypeTransition(masm);


void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Register right = r0;

  __ JumpIfSmi(left, &call_runtime);
  __ b(ge, &call_runtime);

  __ JumpIfSmi(right, &call_runtime);
  __ b(ge, &call_runtime);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);


void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;
  DwVfpRegister double_scratch = d0;
  SwVfpRegister single_scratch = s3;

  Register heap_number_result = no_reg;
  Register heap_number_map = r6;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    __ orr(scratch1, left, right);
    __ JumpIfNotSmi(scratch1, &skip);
    GenerateSmiSmiOperation(masm);

        CpuFeatures::Scope scope(VFP3);
        Label return_heap_number;

        __ b(ne, &transition);

          __ vmov(scratch1, single_scratch);
          __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
          __ b(mi, &return_heap_number);

          __ cmp(scratch1, Operand::Zero());
          __ b(ne, &not_zero);
          __ b(ne, &return_heap_number);

          __ SmiTag(r0, scratch1);

        __ bind(&return_heap_number);

          heap_number_result = r5;
          GenerateHeapResultAllocation(masm,

          __ mov(r0, heap_number_result);

      Label pop_and_call_runtime;

      heap_number_result = r5;
      GenerateHeapResultAllocation(masm,
                                   &pop_and_call_runtime);

          masm, op_, heap_number_result, scratch1);
      if (FLAG_debug_code) {
        __ stop("Unreachable code.");

      __ bind(&pop_and_call_runtime);
      __ b(&call_runtime);

    case Token::BIT_XOR:
    case Token::BIT_AND:

      Label return_heap_number;
      Register scratch3 = r5;

        case Token::BIT_XOR:
        case Token::BIT_AND:

          __ and_(r2, r2, Operand(0x1f));

          __ and_(r2, r2, Operand(0x1f));

          __ b(mi, (result_type_ <= BinaryOpIC::INT32)
                       : &return_heap_number);
          __ b(mi, (result_type_ <= BinaryOpIC::INT32)

          __ and_(r2, r2, Operand(0x1f));

          __ add(scratch1, r2, Operand(0x40000000), SetCC);

      __ b(mi, &return_heap_number);

      __ bind(&return_heap_number);
      heap_number_result = r5;
      GenerateHeapResultAllocation(masm,

        CpuFeatures::Scope scope(VFP3);
        if (op_ != Token::SHR) {
          __ vmov(double_scratch.low(), r2);
          __ vcvt_f64_s32(double_scratch, double_scratch.low());

          __ vmov(double_scratch.low(), r2);
          __ vcvt_f64_u32(double_scratch, double_scratch.low());

        __ mov(r0, heap_number_result);

        WriteInt32ToHeapNumberStub stub(r2, r0, r3);
        __ TailCallStub(&stub);

  if (transition.is_linked() ||
      ((op_ == Token::DIV) && (result_type_ <= BinaryOpIC::INT32))) {
    __ bind(&transition);
    GenerateTypeTransition(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);


void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
    GenerateAddStrings(masm);

  __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(r1, Heap::kNanValueRootIndex);

  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(r0, Heap::kNanValueRootIndex);

  GenerateHeapNumberStub(masm);


void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  GenerateFPOperation(masm, false, &call_runtime, &call_runtime);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);


void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime);

  __ bind(&call_string_add_or_runtime);
    GenerateAddStrings(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);


void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
  Label left_not_string, call_runtime;

  Register right = r0;

  __ JumpIfSmi(left, &left_not_string);
  __ b(ge, &left_not_string);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime);
  __ b(ge, &call_runtime);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  __ bind(&call_runtime);


void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);

    case Token::BIT_AND:
    case Token::BIT_XOR:


void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                Register heap_number_map,
                                                Label* gc_required) {
  Label skip_allocation, allocated;

    __ JumpIfNotSmi(overwritable_operand, &skip_allocation);

    __ AllocateHeapNumber(
        result, scratch1, scratch2, heap_number_map, gc_required);

    __ bind(&skip_allocation);

    __ mov(result, Operand(overwritable_operand));
    __ bind(&allocated);

    __ AllocateHeapNumber(
        result, scratch1, scratch2, heap_number_map, gc_required);


void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
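// TranscendentalCacheStub: looks up sin/cos/tan/log results in the
// transcendental cache before falling back to a C library call. The input
// double is hashed into the cache array; a hit returns the cached heap
// number, a miss computes the value and updates the cache.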
  Label input_not_smi;
  Label invalid_cache;
  const Register scratch0 = r9;
  const Register scratch1 = r7;
  const Register cache_entry = r0;
  const bool tagged = (argument_type_ == TAGGED);

    CpuFeatures::Scope scope(VFP3);

      __ JumpIfNotSmi(r0, &input_not_smi);

      __ IntegerToDoubleConversionWithVFP3(r0, r3, r2);

      __ bind(&input_not_smi);

                  Heap::kHeapNumberMapRootIndex,

    __ And(r1, r1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));

    Isolate* isolate = masm->isolate();
    ExternalReference cache_array =
        ExternalReference::transcendental_cache_array_address(isolate);
    __ mov(cache_entry, Operand(cache_array));

    int cache_array_index
        = type_ * sizeof(isolate->transcendental_cache()->caches_[0]);
    __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index));

    __ b(eq, &invalid_cache);

    { TranscendentalCache::SubCache::Element test_elem[2];
      char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
      char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
      char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
      char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
      char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
      CHECK_EQ(12, elem2_start - elem_start);
      CHECK_EQ(0, elem_in0 - elem_start);

    __ add(cache_entry, cache_entry, Operand(r1, LSL, 2));

    __ b(ne, &calculate);

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(
        counters->transcendental_cache_hit(), 1, scratch0, scratch1);

  __ bind(&calculate);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(
      counters->transcendental_cache_miss(), 1, scratch0, scratch1);

    __ bind(&invalid_cache);
    ExternalReference runtime_function =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime_function, 1, 1);

    CpuFeatures::Scope scope(VFP3);

    __ push(cache_entry);
    GenerateCallCFunction(masm, scratch0);
    __ GetCFunctionDoubleResult(d2);

    __ pop(cache_entry);
    __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r6, scratch0, scratch1, r5, &no_update);

    __ bind(&invalid_cache);
      __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);

      __ CallRuntime(RuntimeFunction(), 1);

    __ bind(&skip_cache);

    GenerateCallCFunction(masm, scratch0);
    __ GetCFunctionDoubleResult(d2);
    __ bind(&no_update);

      __ mov(scratch0, Operand(4 * kPointerSize));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);


void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
  Isolate* isolate = masm->isolate();

  __ PrepareCallCFunction(0, 1, scratch);
  if (masm->use_eabi_hardfloat()) {

    AllowExternalCallThatCantCauseGC scope(masm);
      __ CallCFunction(ExternalReference::math_sin_double_function(isolate),
      __ CallCFunction(ExternalReference::math_cos_double_function(isolate),
      __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
      __ CallCFunction(ExternalReference::math_log_double_function(isolate),

      return Runtime::kAbort;


  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);


  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);


  CpuFeatures::Scope vfp3_scope(VFP3);
  const Register base = r1;
  const Register exponent = r2;
  const Register heapnumbermap = r5;
  const Register heapnumber = r0;
  const SwVfpRegister single_scratch = s0;
  const Register scratch = r9;
  const Register scratch2 = r7;

  Label call_runtime, done, int_exponent;
    Label base_is_smi, unpack_exponent;

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);

    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);

    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ vmov(single_scratch, scratch);
    __ vcvt_f64_s32(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ vldr(double_exponent,
  } else if (exponent_type_ == TAGGED) {
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ vldr(double_exponent,

  if (exponent_type_ != INTEGER) {
    Label int_exponent_convert;

    __ vcvt_u32_f64(single_scratch, double_exponent);

    __ vcvt_f64_u32(double_scratch, single_scratch);
    __ VFPCompareAndSetFlags(double_scratch, double_exponent);
    __ b(eq, &int_exponent_convert);

    Label not_plus_half;

    __ vmov(double_scratch, 0.5);
    __ VFPCompareAndSetFlags(double_exponent, double_scratch);
    __ b(ne, &not_plus_half);

    __ VFPCompareAndSetFlags(double_base, double_scratch);
    __ vneg(double_result, double_scratch, eq);

    __ vadd(double_scratch, double_base, kDoubleRegZero);
    __ vsqrt(double_result, double_scratch);

    __ bind(&not_plus_half);
    __ vmov(double_scratch, -0.5);
    __ VFPCompareAndSetFlags(double_exponent, double_scratch);
    __ b(ne, &call_runtime);

    __ VFPCompareAndSetFlags(double_base, double_scratch);
    __ vmov(double_result, kDoubleRegZero, eq);

    __ vadd(double_scratch, double_base, kDoubleRegZero);
    __ vmov(double_result, 1);
    __ vsqrt(double_scratch, double_scratch);
    __ vdiv(double_result, double_result, double_scratch);

      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ SetCallCDoubleArguments(double_base, double_exponent);
          ExternalReference::power_double_double_function(masm->isolate()),

      __ GetCFunctionDoubleResult(double_result);

    __ bind(&int_exponent_convert);
    __ vcvt_u32_f64(single_scratch, double_exponent);
    __ vmov(scratch, single_scratch);

  __ bind(&int_exponent);

  if (exponent_type_ == INTEGER) {
    __ mov(scratch, exponent);

    __ mov(exponent, scratch);

  __ vmov(double_scratch, double_base);
  __ vmov(double_result, 1.0);

  __ cmp(scratch, Operand(0));
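  // Exponentiation by squaring: each iteration shifts the exponent right one
  // bit; if the shifted-out bit was set, the current power of the base is
  // multiplied into the result, and the base is squared for the next round.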
  __ bind(&while_true);
  __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
  __ vmul(double_result, double_result, double_scratch, cs);
  __ vmul(double_scratch, double_scratch, double_scratch, ne);
  __ b(ne, &while_true);

  __ cmp(exponent, Operand(0));
  __ vmov(double_scratch, 1.0);
  __ vdiv(double_result, double_scratch, double_result);

  __ VFPCompareAndSetFlags(double_result, 0.0);

  __ vmov(single_scratch, exponent);
  __ vcvt_f64_s32(double_exponent, single_scratch);

  Counters* counters = masm->isolate()->counters();

    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ vstr(double_result,

    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);

      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ SetCallCDoubleArguments(double_base, double_exponent);
          ExternalReference::power_double_double_function(masm->isolate()),

      __ GetCFunctionDoubleResult(double_result);

    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);


bool CEntryStub::NeedsImmovableCode() {

  return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&


void CodeStub::GenerateStubsAheadOfTime() {


void CodeStub::GenerateFPStubs() {
  Handle<Code> code = save_doubles.GetCode();
  code->set_is_pregenerated(true);
  stub.GetCode()->set_is_pregenerated(true);
  code->GetIsolate()->set_fp_stubs_generated(true);

  Handle<Code> code = stub.GetCode();
  code->set_is_pregenerated(true);


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool always_allocate) {
  Isolate* isolate = masm->isolate();

    __ PrepareCallCFunction(1, 0, r1);
    __ CallCFunction(ExternalReference::perform_gc_function(isolate),

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(isolate);
  if (always_allocate) {
    __ mov(r0, Operand(scope_depth));
    __ add(r1, r1, Operand(1));

#if defined(V8_HOST_ARCH_ARM)
  int frame_alignment_mask = frame_alignment - 1;
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);

      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);

  __ mov(r2, Operand(ExternalReference::isolate_address()));

  masm->add(lr, pc, Operand(4));

  if (always_allocate) {
    __ mov(r2, Operand(scope_depth));
    __ sub(r3, r3, Operand(1));

  Label failure_returned;

  __ add(r2, r0, Operand(1));
  __ b(eq, &failure_returned);

  __ LeaveExitFrame(save_doubles_, r4);

  __ bind(&failure_returned);

  __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ b(eq, throw_out_of_memory_exception);

  __ mov(r3, Operand(isolate->factory()->the_hole_value()));
  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ cmp(r0, Operand(isolate->factory()->termination_exception()));
  __ b(eq, throw_termination_exception);

  __ jmp(throw_normal_exception);


  __ sub(r6, r6, Operand(kPointerSize));

  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles_);

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ mov(r0, Operand(reinterpret_cast<int32_t>(failure)));
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ bind(&throw_out_of_memory_exception);

  Isolate* isolate = masm->isolate();
  ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
  __ mov(r2, Operand(external_caught));

  __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(r0);

  __ bind(&throw_normal_exception);
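// JSEntryStub: sets up the outermost JS entry frame, installs a JS_ENTRY try
// handler around the invocation, and dispatches to either the construct or
// the normal entry trampoline.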
  Label invoke, handler_entry, exit;

    CpuFeatures::Scope scope(VFP3);

    __ vmov(kDoubleRegZero, 0.0);

  Isolate* isolate = masm->isolate();
  __ mov(r8, Operand(-1));
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;

         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));

  Label non_outermost_js;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
  __ cmp(r6, Operand::Zero());
  __ b(ne, &non_outermost_js);

  __ bind(&non_outermost_js);

  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();

  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  __ mov(r5, Operand(isolate->factory()->the_hole_value()));
  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
    __ mov(ip, Operand(construct_entry));

    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
    __ mov(ip, Operand(entry));

  Label non_outermost_js_2;
  __ b(ne, &non_outermost_js_2);
  __ mov(r6, Operand::Zero());
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));

  __ bind(&non_outermost_js_2);

         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));

  if (FLAG_debug_code) {

    CpuFeatures::Scope scope(VFP3);


  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());

  ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());

  const Register object = r0;
  const Register function = r1;
  const Register prototype = r4;
  const Register inline_site = r9;
  const Register scratch = r2;

  Label slow, loop, is_instance, is_not_instance, not_js_object;

  if (!HasArgsInRegisters()) {

  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  if (!HasCallSiteInlineCheck()) {
    __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
    __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
    __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
    __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);

    ASSERT(HasArgsInRegisters());

    __ LoadFromSafepointRegisterSlot(scratch, r4);
    __ sub(inline_site, lr, scratch);

    __ GetRelocatedValueLocation(inline_site, scratch);

  Register scratch2 = map;

  __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
  __ cmp(scratch, Operand(prototype));
  __ b(eq, &is_instance);
  __ cmp(scratch, scratch2);
  __ b(eq, &is_not_instance);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);

    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));

    __ GetRelocatedValueLocation(inline_site, scratch);

    if (!ReturnTrueFalseObject()) {

  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);

    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));

    __ GetRelocatedValueLocation(inline_site, scratch);

    if (!ReturnTrueFalseObject()) {

  __ Ret(HasArgsInRegisters() ? 0 : 2);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);

  __ JumpIfSmi(function, &slow);

  __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
  __ b(ne, &object_not_null);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&object_not_null);

  __ JumpIfNotSmi(object, &object_not_null_or_smi);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&object_not_null_or_smi);

  __ IsObjectJSStringType(object, scratch, &slow);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  if (!ReturnTrueFalseObject()) {
    if (HasArgsInRegisters()) {

    __ cmp(r0, Operand::Zero());
    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
    __ Ret(HasArgsInRegisters() ? 0 : 2);


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  const int kDisplacement =

  __ JumpIfNotSmi(r1, &slow);

  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);


void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {

  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);


void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {

  Label adaptor_frame, try_allocate;
  __ b(eq, &adaptor_frame);

  __ b(&try_allocate);

  __ bind(&adaptor_frame);

  __ bind(&try_allocate);

  const int kParameterMapHeaderSize =

  const int kNormalOffset =
  const int kAliasedOffset =

  __ cmp(r1, Operand::Zero());

  const int kCalleeOffset = JSObject::kHeaderSize +

  const int kLengthOffset = JSObject::kHeaderSize +

  Label skip_parameter_map;
  __ b(eq, &skip_parameter_map);

  __ LoadRoot(r6, Heap::kNonStrictArgumentsElementsMapRootIndex);

  __ add(r6, r6, Operand(kParameterMapHeaderSize));

  Label parameters_loop, parameters_test;

  __ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
  __ add(r3, r3, Operand(kParameterMapHeaderSize));

  __ jmp(&parameters_test);

  __ bind(&parameters_loop);

  __ bind(&parameters_test);
  __ b(ne, &parameters_loop);

  __ bind(&skip_parameter_map);

  __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);

  Label arguments_loop, arguments_test;
  __ jmp(&arguments_test);

  __ bind(&arguments_loop);
  __ sub(r4, r4, Operand(kPointerSize));

  __ bind(&arguments_test);
  __ b(lt, &arguments_loop);

  __ add(sp, sp, Operand(3 * kPointerSize));

  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {

  Label adaptor_frame, try_allocate, runtime;
  __ b(eq, &adaptor_frame);

  __ b(&try_allocate);

  __ bind(&adaptor_frame);

  Label add_arguments_object;
  __ bind(&try_allocate);
  __ b(eq, &add_arguments_object);
  __ bind(&add_arguments_object);

  __ AllocateInNewSpace(r1,

  __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);

  __ sub(r1, r1, Operand(1));

  __ add(sp, sp, Operand(3 * kPointerSize));

  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);


void RegExpExecStub::Generate(MacroAssembler* masm) {

#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  Label runtime, invoke_regexp;

  Register subject = r4;
  Register regexp_data = r5;
  Register last_match_info_elements = r6;

  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);
  __ mov(r0, Operand(address_of_regexp_stack_memory_size));
  __ cmp(r0, Operand(0));

  __ JumpIfSmi(r0, &runtime);

  if (FLAG_debug_code) {
    __ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
    __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");

  __ add(r2, r2, Operand(2));

  __ JumpIfSmi(subject, &runtime);
  Condition is_string = masm->IsObjectStringType(subject, r0);

  __ JumpIfNotSmi(r0, &runtime);

  __ JumpIfSmi(r0, &runtime);

  __ ldr(last_match_info_elements,

  __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);

  __ mov(r9, Operand(0));

  __ b(eq, &seq_string);

  Label cons_string, external_string, check_encoding;
  __ b(lt, &cons_string);
  __ b(eq, &external_string);

  __ jmp(&check_encoding);

  __ bind(&cons_string);
  __ CompareRoot(r0, Heap::kEmptyStringRootIndex);

  __ bind(&check_encoding);

  __ b(ne, &external_string);

  __ bind(&seq_string);

  __ JumpIfSmi(r7, &runtime);

  __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);

  const int kRegExpExecuteArguments = 9;
  const int kParameterRegisters = 4;
  __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);

  __ mov(r0, Operand(ExternalReference::isolate_address()));

  __ mov(r0, Operand(1));

  __ mov(r0, Operand(address_of_regexp_stack_memory_address));
  __ mov(r2, Operand(address_of_regexp_stack_memory_size));

  __ mov(r0, Operand(0));

         Operand(ExternalReference::address_of_static_offsets_vector(isolate)));

  __ eor(r3, r3, Operand(1));

  __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));

  __ mov(r0, subject);

  DirectCEntryStub stub;
  stub.GenerateCall(masm, r7);

  __ cmp(r0, Operand(1));

  __ mov(r1, Operand(isolate->factory()->the_hole_value()));
  __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ CompareRoot(r0, Heap::kTerminationExceptionRootIndex);

  Label termination_exception;
  __ b(eq, &termination_exception);

  __ bind(&termination_exception);
  __ ThrowUncatchable(r0);

  __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
  __ add(sp, sp, Operand(4 * kPointerSize));

  __ add(r1, r1, Operand(2));

  __ mov(r2, subject);
  __ RecordWriteField(last_match_info_elements,

  __ RecordWriteField(last_match_info_elements,

  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate);
  __ mov(r2, Operand(address_of_static_offsets_vector));

  Label next_capture, done;
         last_match_info_elements,
  __ bind(&next_capture);

  __ jmp(&next_capture);

  __ add(sp, sp, Operand(4 * kPointerSize));

  __ bind(&external_string);

  if (FLAG_debug_code) {
    __ Assert(eq, "external string expected, but not found");

  __ jmp(&seq_string);

  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP


void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;

  Factory* factory = masm->isolate()->factory();

  __ JumpIfNotSmi(r1, &slowcase);
  __ b(hi, &slowcase);

  __ add(r2, r5, Operand(objects_size));
  __ AllocateInNewSpace(

  __ mov(r4, Operand(factory->empty_fixed_array()));

  __ mov(r2, Operand(factory->fixed_array_map()));

  __ mov(r2, Operand(factory->the_hole_value()));

  __ cmp(r5, Operand(0));

  __ add(sp, sp, Operand(3 * kPointerSize));

  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
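// Records the target of a call for later optimization decisions: an unset
// (the-hole) cache entry is filled with the called function, while a cache
// entry that already holds a different function is degraded to the undefined
// (megamorphic) sentinel.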
5138 static void GenerateRecordCallTarget(MacroAssembler* masm) {
5147 masm->isolate()->heap()->undefined_value());
5149 masm->isolate()->heap()->the_hole_value());
5158 __ CompareRoot(
r3, Heap::kUndefinedValueRootIndex);
5163 __ CompareRoot(
r3, Heap::kTheHoleValueRootIndex);
5166 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex,
ne);
void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow, non_function;
  if (ReceiverMightBeImplicit()) {
  __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
  __ JumpIfSmi(r1, &non_function);
  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  ParameterCount actual(argc_);
  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
    __ b(eq, &call_as_function);
    __ InvokeFunction(r1,
    __ bind(&call_as_function);
  __ InvokeFunction(r1,
  if (RecordCallTarget()) {
            masm->isolate()->heap()->undefined_value());
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ b(ne, &non_function);
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);
  __ bind(&non_function);
  __ mov(r0, Operand(argc_));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);


void CallConstructStub::Generate(MacroAssembler* masm) {
  Label slow, non_function_call;
  __ JumpIfSmi(r1, &non_function_call);
  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  __ b(ne, &non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ bind(&non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);


void CompareStub::PrintName(StringStream* stream) {
  const char* cc_name;
  switch (cc_) {
    case lt: cc_name = "LT"; break;
    case gt: cc_name = "GT"; break;
    case le: cc_name = "LE"; break;
    case ge: cc_name = "GE"; break;
    case eq: cc_name = "EQ"; break;
    case ne: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }
  bool is_equality = cc_ == eq || cc_ == ne;
  stream->Add("CompareStub_%s", cc_name);
  stream->Add(lhs_.is(r0) ? "_r0" : "_r1");
  stream->Add(rhs_.is(r0) ? "_r0" : "_r1");
  if (strict_ && is_equality) stream->Add("_STRICT");
  if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
  if (!include_number_compare_) stream->Add("_NO_NUMBER");
  if (!include_smi_compare_) stream->Add("_NO_SMI");


int CompareStub::MinorKey() {
  ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 12));
  return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
         | RegisterField::encode(lhs_.is(r0))
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_)
         | IncludeSmiCompareField::encode(include_smi_compare_);
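// [Editor's note] MinorKey() packs the stub's parameters into one integer
// with BitField<>::encode. A sketch of the same packing written with
// explicit shifts; the field positions here are made up for illustration
// (the real widths come from the BitField declarations):
namespace editor_sketch {

inline unsigned EncodeMinorKey(unsigned condition_bits, bool lhs_is_r0,
                               bool strict, bool never_nan_nan,
                               bool include_number_compare,
                               bool include_smi_compare) {
  // Booleans occupy one bit each; the condition gets the high bits.
  // Decoding is the inverse mask-and-shift.
  return (condition_bits << 5) |
         (static_cast<unsigned>(lhs_is_r0) << 4) |
         (static_cast<unsigned>(strict) << 3) |
         (static_cast<unsigned>(never_nan_nan) << 2) |
         (static_cast<unsigned>(include_number_compare) << 1) |
         static_cast<unsigned>(include_smi_compare);
}

}  // namespace editor_sketch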
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label got_char_code;
  Label sliced_string;
  __ JumpIfSmi(object_, receiver_not_string_);
  __ b(ne, receiver_not_string_);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);
  __ cmp(ip, Operand(index_));
  __ b(ls, index_out_of_range_);


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  __ bind(&index_not_smi_);
              Heap::kHeapNumberMapRootIndex,
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  __ CallRuntime(Runtime::kNumberToSmi, 1);
  __ Move(index_, r0);
  call_helper.AfterCall(masm);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  __ jmp(&got_smi_index_);

  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  __ Move(result_, r0);
  call_helper.AfterCall(masm);
  __ Abort("Unexpected fallthrough from CharCodeAt slow case");


void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  __ b(ne, &slow_case_);
  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case_);


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  __ Move(result_, r0);
  call_helper.AfterCall(masm);
  __ Abort("Unexpected fallthrough from CharFromCode slow case");


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  __ add(count, count, Operand(count), SetCC);
  __ sub(count, count, Operand(1), SetCC);


enum CopyCharactersFlags {
  COPY_ASCII = 1,
  DEST_ALWAYS_ALIGNED = 2
};


void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
                                              Register dest,
                                              Register src,
                                              Register count,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3,
                                              Register scratch4,
                                              Register scratch5,
                                              int flags) {
  bool ascii = (flags & COPY_ASCII) != 0;
  bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
  if (dest_always_aligned && FLAG_debug_code) {
    __ Check(eq, "Destination of copy not aligned.");

  const int kReadAlignment = 4;
  const int kReadAlignmentMask = kReadAlignment - 1;
  __ add(count, count, Operand(count), SetCC);
  __ cmp(count, Operand(8));
  __ add(count, dest, Operand(count));
  Register limit = count;
  __ b(lt, &byte_loop);

  if (!dest_always_aligned) {
    __ and_(scratch4, dest, Operand(kReadAlignmentMask), SetCC);
    __ b(eq, &dest_aligned);
    __ cmp(scratch4, Operand(2));
    __ bind(&dest_aligned);

  __ sub(scratch4, dest, Operand(src));
  __ and_(scratch4, scratch4, Operand(0x03), SetCC);
  __ b(eq, &simple_loop);
  __ mov(scratch4, Operand(scratch4, LSL, 3));
  Register left_shift = scratch4;
  __ and_(src, src, Operand(~3));
  __ rsb(scratch2, left_shift, Operand(32));
  Register right_shift = scratch2;
  __ mov(scratch1, Operand(scratch1, LSR, right_shift));
  __ sub(scratch5, limit, Operand(dest));
  __ orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift));
  __ mov(scratch1, Operand(scratch3, LSR, right_shift));
  __ sub(scratch5, scratch5, Operand(8), SetCC);
  __ add(scratch5, scratch5, Operand(4), SetCC);
  __ cmp(scratch4, Operand(scratch5, LSL, 3), ne);
  __ cmp(scratch5, Operand(2));

  __ bind(&simple_loop);
  __ sub(scratch3, limit, Operand(dest));
  __ cmp(scratch3, Operand(8));

  __ bind(&byte_loop);
  __ cmp(dest, Operand(limit));
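// [Editor's note] A portable C++ sketch of the copy strategy above: align
// the destination byte-by-byte, copy words in the middle, finish with a
// byte tail. The ARM stub additionally handles a misaligned source by
// combining two word loads with the LSL/LSR shifts seen above, which this
// sketch sidesteps with memcpy.
#include <cstring>
namespace editor_sketch {

inline void CopyChars(unsigned char* dest, const unsigned char* src,
                      int count) {
  // Byte loop until 'dest' is word aligned (kReadAlignmentMask above).
  while (count > 0 && (reinterpret_cast<unsigned long>(dest) & 3) != 0) {
    *dest++ = *src++;
    count--;
  }
  // Word loop; memcpy keeps the source access alignment-safe.
  while (count >= 4) {
    unsigned word;
    std::memcpy(&word, src, sizeof(word));
    std::memcpy(dest, &word, sizeof(word));
    dest += 4;
    src += 4;
    count -= 4;
  }
  while (count-- > 0) *dest++ = *src++;  // byte tail
}

}  // namespace editor_sketch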
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Register scratch5,
                                                        Label* not_found) {
  Register scratch = scratch3;

  Label not_array_index;
  __ sub(scratch, c1, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
  __ b(hi, &not_array_index);
  __ sub(scratch, c2, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
  __ b(ls, not_found);

  __ bind(&not_array_index);
  Register hash = scratch1;
  Register chars = c1;
  Register symbol_table = c2;
  Register undefined = scratch4;
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
  Register mask = scratch2;
  __ mov(mask, Operand(mask, ASR, 1));
  __ sub(mask, mask, Operand(1));
  Register first_symbol_table_element = symbol_table;
  __ add(first_symbol_table_element, symbol_table,

  const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  Register candidate = scratch5;
  for (int i = 0; i < kProbes; i++) {
    __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
    __ mov(candidate, hash);
    __ and_(candidate, candidate, Operand(mask));
    __ b(ne, &is_string);
    __ cmp(undefined, candidate);
    __ b(eq, not_found);
    if (FLAG_debug_code) {
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(ip, candidate);
      __ Assert(eq, "oddball in symbol table is not undefined or the hole");
    __ jmp(&next_probe[i]);

    __ bind(&is_string);
    __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch,
                                              &next_probe[i]);
    __ b(ne, &next_probe[i]);
    __ cmp(chars, scratch);
    __ b(eq, &found_in_symbol_table);
    __ bind(&next_probe[i]);

  Register result = candidate;
  __ bind(&found_in_symbol_table);
  __ Move(r0, result);
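// [Editor's note] The candidate computation above is open addressing with
// quadratic probing: probe i looks at (hash + i*(i+1)/2) & mask, where mask
// is the table capacity minus one (capacities are powers of two). Only
// kProbes slots are tried inline before bailing out to not_found. Index
// arithmetic sketch, assuming that formula for GetProbeOffset:
namespace editor_sketch {

inline unsigned SymbolTableProbe(unsigned hash, unsigned i, unsigned mask) {
  unsigned probe_offset = i * (i + 1) / 2;  // SymbolTable::GetProbeOffset(i)
  return (hash + probe_offset) & mask;      // wraps within the table
}

}  // namespace editor_sketch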
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character) {
  __ LoadRoot(hash, Heap::kHashSeedRootIndex);
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character) {
  __ add(hash, hash, Operand(character));
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash) {
  __ add(hash, hash, Operand(hash, LSL, 3));
  __ eor(hash, hash, Operand(hash, LSR, 11));
  __ add(hash, hash, Operand(hash, LSL, 15));
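// [Editor's note] The three helpers above are the seeded Jenkins
// one-at-a-time hash. The same computation in plain C++; in lines not shown
// in this excerpt the stub also masks the result to the hash-field width
// and substitutes a non-zero constant when the hash comes out zero:
namespace editor_sketch {

inline unsigned StringHash(const unsigned char* chars, int length,
                           unsigned seed) {
  unsigned hash = seed;                     // GenerateHashInit
  for (int i = 0; i < length; i++) {        // GenerateHashAddCharacter
    hash += chars[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;                        // GenerateHashGetHash
  hash ^= hash >> 11;
  hash += hash << 15;
  return hash;
}

}  // namespace editor_sketch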
void SubStringStub::Generate(MacroAssembler* masm) {
  __ JumpIfSmi(r0, &runtime);
  __ b(eq, &return_r0);

  Label underlying_unpacked, sliced_string, seq_or_external_string;
  __ b(eq, &seq_or_external_string);
  __ b(ne, &sliced_string);
  __ CompareRoot(r5, Heap::kEmptyStringRootIndex);
  __ jmp(&underlying_unpacked);

  __ bind(&sliced_string);
  __ jmp(&underlying_unpacked);

  __ bind(&seq_or_external_string);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    __ b(lt, &copy_routine);
    Label two_byte_slice, set_slice_header;
    __ b(eq, &two_byte_slice);
    __ AllocateAsciiSlicedString(r0, r2, r6, r7, &runtime);
    __ jmp(&set_slice_header);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(r0, r2, r6, r7, &runtime);
    __ bind(&set_slice_header);

  __ bind(&copy_routine);

  Label two_byte_sequential, sequential_string, allocate_result;
  __ b(eq, &sequential_string);
  __ jmp(&allocate_result);

  __ bind(&sequential_string);

  __ bind(&allocate_result);
  __ b(eq, &two_byte_sequential);
                                    COPY_ASCII | DEST_ALWAYS_ALIGNED);

  __ bind(&two_byte_sequential);
  __ AllocateTwoByteString(r0, r2, r4, r6, r7, &runtime);

  __ bind(&return_r0);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));

  __ TailCallRuntime(Runtime::kSubString, 3, 1);
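// [Editor's note] Structural sketch of the slice fast path above: when
// FLAG_string_slices is on and the result is long enough, no characters are
// copied at all; the stub just allocates a SlicedString header pointing
// into the (already unpacked) flat parent string.
namespace editor_sketch {

struct SlicedStringRep {
  const void* parent;  // flat underlying string found at underlying_unpacked
  int offset;          // start of the substring within the parent
  int length;          // substring length
};

// Short results take the copy routine instead (the 'lt' branch above):
// for them a slice header would cost more than the copied characters.
inline bool ShouldMakeSlice(int result_length, int min_slice_length) {
  return result_length >= min_slice_length;  // SlicedString::kMinLength
}

}  // namespace editor_sketch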
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2,
                                                      Register scratch3) {
  Register length = scratch1;

  Label strings_not_equal, check_zero_length;
  __ cmp(length, scratch2);
  __ b(eq, &check_zero_length);
  __ bind(&strings_not_equal);

  Label compare_chars;
  __ bind(&check_zero_length);
  __ cmp(length, Operand(0));
  __ b(ne, &compare_chars);

  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm,
                                left, right, length, scratch2, scratch3,
                                &strings_not_equal);


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  Label result_not_equal, compare_lengths;
  __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
  Register length_delta = scratch3;
  Register min_length = scratch1;
  __ cmp(min_length, Operand(0));
  __ b(eq, &compare_lengths);

  GenerateAsciiCharsCompareLoop(masm,
                                left, right, min_length, scratch2, scratch4,
                                &result_not_equal);

  __ bind(&compare_lengths);
  __ mov(r0, Operand(length_delta), SetCC);
  __ bind(&result_not_equal);


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch1,
    Register scratch2,
    Label* chars_not_equal) {
  __ SmiUntag(length);
  __ add(scratch1, length,
  __ add(left, left, Operand(scratch1));
  __ add(right, right, Operand(scratch1));
  __ rsb(length, length, Operand::Zero());
  Register index = length;
  __ cmp(scratch1, scratch2);
  __ b(ne, chars_not_equal);
  __ add(index, index, Operand(1), SetCC);
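// [Editor's note] The loop above uses a common assembler idiom: point both
// strings one past their ends, negate the length, and count an index up
// toward zero so a single flag-setting add both advances the counter and
// tests for termination. The equivalent in C++:
namespace editor_sketch {

inline bool AsciiCharsEqual(const unsigned char* left,
                            const unsigned char* right, int length) {
  const unsigned char* left_end = left + length;
  const unsigned char* right_end = right + length;
  for (int index = -length; index != 0; index++) {  // counts up toward 0
    if (left_end[index] != right_end[index]) return false;
  }
  return true;
}

}  // namespace editor_sketch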
void StringCompareStub::Generate(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ b(ne, &not_same);
  __ IncrementCounter(counters->string_compare_native(), 1, r1, r2);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
  __ IncrementCounter(counters->string_compare_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);


void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Counters* counters = masm->isolate()->counters();
  __ JumpIfEitherSmi(r0, r1, &call_runtime);
  __ b(ne, &call_runtime);
  GenerateConvertArgument(
      masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin);
  builtin_id = Builtins::STRING_ADD_RIGHT;
  ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
  GenerateConvertArgument(
      masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin);
  builtin_id = Builtins::STRING_ADD_LEFT;

  Label strings_not_empty;
  __ b(ne, &strings_not_empty);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&strings_not_empty);

  Label string_add_flat_result, longer_than_two;
  __ cmp(r6, Operand(2));
  __ b(ne, &longer_than_two);
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7,

  Label make_two_character_string;
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&make_two_character_string);
  __ mov(r6, Operand(2));
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&longer_than_two);
  __ b(lt, &string_add_flat_result);
  __ b(hs, &call_runtime);

  Label non_ascii, allocated, ascii_data;
  __ b(eq, &non_ascii);
  __ bind(&ascii_data);
  __ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
  __ bind(&allocated);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&non_ascii);
  __ b(ne, &ascii_data);
  __ b(eq, &ascii_data);
  __ AllocateTwoByteConsString(r7, r6, r4, r5, &call_runtime);

  Label first_prepared, second_prepared;
  __ bind(&string_add_flat_result);
  __ b(ne, &call_runtime);
  __ b(eq, &first_prepared);
  __ b(ne, &call_runtime);
  __ bind(&first_prepared);
  __ b(eq, &second_prepared);
  __ b(ne, &call_runtime);
  __ bind(&second_prepared);

  Label non_ascii_string_add_flat_result;
  __ b(eq, &non_ascii_string_add_flat_result);
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&non_ascii_string_add_flat_result);
  __ AllocateTwoByteString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
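// [Editor's note] Decision sketch for StringAddStub::Generate above. Very
// short results are built flat (characters copied); from ConsString's
// minimum length upward the stub allocates a two-pointer cons cell instead
// of copying either operand; overlong results go to the runtime (the 'hs'
// branch) to raise an error. The thresholds are parameters here, not the
// real constants.
namespace editor_sketch {

enum AddShape { kFlatCopy, kConsCell, kRuntimeCall };

inline AddShape ChooseAddShape(int sum_length, int min_cons_length,
                               int max_string_length) {
  if (sum_length < min_cons_length) return kFlatCopy;      // copy both
  if (sum_length > max_string_length) return kRuntimeCall; // must throw
  return kConsCell;                                        // O(1) concat
}

}  // namespace editor_sketch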
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Register scratch4,
                                            Label* slow) {
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);

  __ bind(&not_string);
  __ mov(arg, scratch1);

  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CompareObjectType(


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  __ JumpIfNotSmi(r2, &miss);
  if (GetCondition() == eq) {


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  Label unordered, maybe_undefined1, maybe_undefined2;
  __ JumpIfSmi(r2, &generic_stub);
  __ b(ne, &maybe_undefined1);
  __ b(ne, &maybe_undefined2);
  CpuFeatures::Scope scope(VFP3);
  __ VFPCompareAndSetFlags(d0, d1);
  __ b(vs, &unordered);

  __ bind(&unordered);
  __ bind(&generic_stub);
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(ne, &maybe_undefined2);

  __ bind(&maybe_undefined2);
  __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
  __ b(eq, &unordered);


void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  Register left = r1;
  Register right = r0;
  __ JumpIfEitherSmi(left, right, &miss);
  __ and_(tmp1, tmp1, Operand(tmp2));
  __ cmp(left, right);


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  Register left = r1;
  Register right = r0;
  __ JumpIfEitherSmi(left, right, &miss);
  __ orr(tmp3, tmp1, tmp2);
  __ cmp(left, right);
  __ and_(tmp3, tmp1, Operand(tmp2));
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(
      tmp1, tmp2, tmp3, tmp4, &runtime);
      masm, left, right, tmp1, tmp2, tmp3);
      masm, left, right, tmp1, tmp2, tmp3, tmp4);
  __ Push(left, right);
  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  __ JumpIfSmi(r2, &miss);


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  __ JumpIfSmi(r2, &miss);
  __ cmp(r2, Operand(known_map_));
  __ cmp(r3, Operand(known_map_));


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
  __ CallExternalReference(miss, 3);


void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    ExternalReference function) {
  __ mov(r2, Operand(function));
  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
                     RelocInfo::CODE_TARGET));
            masm->SizeOfCodeGeneratedSince(&start));


void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register receiver,
                                                        Register properties,
                                                        Handle<String> name,
                                                        Register scratch0) {
  for (int i = 0; i < kInlinedProbes; i++) {
    Register index = scratch0;
    __ sub(index, index, Operand(1));
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
    __ add(index, index, Operand(index, LSL, 1));

    Register entity_name = scratch0;
    Register tmp = properties;
    __ add(tmp, properties, Operand(index, LSL, 1));
    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kInlinedProbes - 1) {
      __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ cmp(entity_name, tmp);
      __ b(eq, &the_hole);
      __ ldrb(entity_name,

  const int spill_mask =
  __ mov(r1, Operand(Handle<String>(name)));
  __ cmp(r0, Operand(0));


void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register scratch1,
                                                        Register scratch2) {
  ASSERT(!elements.is(scratch1));
  ASSERT(!elements.is(scratch2));
  ASSERT(!name.is(scratch1));
  ASSERT(!name.is(scratch2));

  if (FLAG_debug_code) __ AbortIfNotString(name);

  __ sub(scratch1, scratch1, Operand(1));
  for (int i = 0; i < kInlinedProbes; i++) {
    ASSERT(StringDictionary::GetProbeOffset(i) <
    __ add(scratch2, scratch2, Operand(
    __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
    __ add(scratch2, elements, Operand(scratch2, LSL, 2));
    __ cmp(name, Operand(ip));

  const int spill_mask =
      ~(scratch1.bit() | scratch2.bit());
  __ Move(r0, elements);
  __ Move(r0, elements);
  __ cmp(r0, Operand(0));
  __ mov(scratch2, Operand(r2));


void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  Register result = r0;
  Register dictionary = r0;
  Register index = r2;
  Register undefined = r5;
  Register entry_key = r6;

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
  __ sub(mask, mask, Operand(1));
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    ASSERT(StringDictionary::GetProbeOffset(i) <
    __ add(index, hash, Operand(
    __ mov(index, Operand(hash));
    __ add(index, index, Operand(index, LSL, 1));
    __ add(index, dictionary, Operand(index, LSL, 2));
    __ cmp(entry_key, Operand(undefined));
    __ b(eq, &not_in_dictionary);
    __ cmp(entry_key, Operand(key));
    __ b(eq, &in_dictionary);
    __ b(eq, &maybe_in_dictionary);

  __ bind(&maybe_in_dictionary);
  __ mov(result, Operand::Zero());

  __ bind(&in_dictionary);
  __ mov(result, Operand(1));

  __ bind(&not_in_dictionary);
  __ mov(result, Operand::Zero());
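// [Editor's note] The dictionary probing above uses the same quadratic
// stride as the symbol-table probe earlier, but each StringDictionary
// entry is three words wide (key, value, details), which is what the
// 'index + (index << 1)' scaling computes. Index arithmetic in C++:
namespace editor_sketch {

inline unsigned DictionaryKeyOffset(unsigned hash, unsigned i,
                                    unsigned capacity) {
  const unsigned kEntrySize = 3;            // key, value, details
  unsigned mask = capacity - 1;             // capacity is a power of two
  unsigned entry = (hash + i * (i + 1) / 2) & mask;
  return entry * kEntrySize;                // word offset of the entry key
}

}  // namespace editor_sketch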
struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;

#define REG(Name) { kRegister_ ## Name ## _Code }

static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {

  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&
  stub1.GetCode()->set_is_pregenerated(true);
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    stub.GetCode()->set_is_pregenerated(true);


void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;
  __ b(&skip_to_incremental_noncompacting);
  __ b(&skip_to_incremental_compacting);
  __ RememberedSetHelper(object_,

  __ bind(&skip_to_incremental_noncompacting);
  __ bind(&skip_to_incremental_compacting);


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  Label dont_need_remembered_set;
  __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
  __ JumpIfNotInNewSpace(regs_.scratch0(),
                         &dont_need_remembered_set);
  __ CheckPageFlag(regs_.object(),
                   &dont_need_remembered_set);
  CheckNeedsToInformIncrementalMarker(
      masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ RememberedSetHelper(object_,

  __ bind(&dont_need_remembered_set);
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm,
                                              Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  Register address =
      r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  ASSERT(!address.is(regs_.object()));
  __ Move(address, regs_.address());
  __ Move(r0, regs_.object());
  __ Move(r1, address);
  __ mov(r2, Operand(ExternalReference::isolate_address()));
  AllowExternalCallThatCantCauseGC scope(masm);
      ExternalReference::incremental_evacuation_record_write_function(
      ExternalReference::incremental_marking_record_write_function(
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label need_incremental;
  Label need_incremental_pop_scratch;

  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(),
                 &on_black);
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,

  __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));

  Label ensure_not_white;
  __ CheckPageFlag(regs_.scratch0(),
  __ CheckPageFlag(regs_.object(),
  __ bind(&ensure_not_white);
  __ Push(regs_.object(), regs_.address());
  __ EnsureNotWhite(regs_.scratch0(),
                    &need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,

  __ bind(&need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  __ bind(&need_incremental);
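// [Editor's note] RecordWriteStub combines two write barriers. Decision
// sketch, simplified: the real stub also checks the object's page flags and
// is patched in place between store-buffer-only and incremental modes:
namespace editor_sketch {

enum BarrierAction {
  kNothing = 0,
  kInformMarker = 1,   // grey the value / call the record-write function
  kRememberSlot = 2    // add the slot to the store buffer
};

inline int WriteBarrierActions(bool incremental_marking,
                               bool object_is_black,
                               bool value_in_new_space) {
  int actions = kNothing;
  // Incremental barrier: a black (fully scanned) object gained a pointer,
  // so the marker must be told or it could miss the new value.
  if (incremental_marking && object_is_black) actions |= kInformMarker;
  // Generational barrier: record old->new pointers for the next scavenge.
  if (value_in_new_space) actions |= kRememberSlot;
  return actions;
}

}  // namespace editor_sketch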
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label double_elements;
  Label slow_elements;
  Label fast_elements;

  __ CheckFastElements(r2, r5, &double_elements);
  __ JumpIfSmi(r0, &smi_element);
  __ CheckFastSmiElements(r2, r5, &fast_elements);

  __ bind(&slow_elements);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&fast_elements);

  __ bind(&smi_element);

  __ bind(&double_elements);

#endif  // V8_TARGET_ARCH_ARM
static const int kResourceDataOffset
static const int kCallerFPOffset
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static const int kLengthOffset
static const int kBitFieldOffset
void GenerateFast(MacroAssembler *masm)
STATIC_CHECK((kStringRepresentationMask|kStringEncodingMask)==Internals::kFullStringRepresentationMask)
static const int kNumRegisters
const intptr_t kSmiTagMask
static int GetBranchOffset(Instr instr)
static const int kCodeOffset
static const int kEvacuationCandidateMask
#define CHECK_EQ(expected, value)
static void LoadNumberAsInt32Double(MacroAssembler *masm, Register object, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register heap_number_map, Register scratch1, Register scratch2, SwVfpRegister single_scratch, Label *not_int32)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static const int kCodeEntryOffset
static const int kMaxAsciiCharCode
const RegList kCallerSaved
static const int kPrototypeOrInitialMapOffset
#define COMPARE(asm_, compare_string)
static int SlotOffset(int index)
RecordWriteStub(Register object, Register value, Register address, RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode)
static const int kStaticOffsetsVectorSize
static const int kArgumentsObjectSize
static void GenerateFixedRegStubsAheadOfTime()
const uint32_t kTwoByteStringTag
const int kFailureTypeTagSize
static const char * GetName(TypeInfo type_info)
static Failure * InternalError()
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, bool ascii)
static const char * Name(Value tok)
static Smi * FromInt(int value)
void Generate(MacroAssembler *masm)
static void DoubleIs32BitInteger(MacroAssembler *masm, Register src1, Register src2, Register dst, Register scratch, Label *not_int32)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
static const int kDataOffset
static const int kGlobalReceiverOffset
static void GenerateCopyCharactersLong(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Register scratch5, int flags)
void Generate(MacroAssembler *masm)
static Failure * OutOfMemoryException()
static void LoadNumberAsInt32(MacroAssembler *masm, Register object, Register dst, Register heap_number_map, Register scratch1, Register scratch2, Register scratch3, DwVfpRegister double_scratch, Label *not_int32)
static void GenerateHashGetHash(MacroAssembler *masm, Register hash)
static const int kExponentBias
static Handle< Object > UninitializedSentinel(Isolate *isolate)
SwVfpRegister high() const
static bool IsSupported(CpuFeature f)
static Failure * Exception()
void Generate(MacroAssembler *masm)
virtual bool IsPregenerated()
void Generate(MacroAssembler *masm)
static const int kCallerSPOffset
bool CanBeUndetectable() const
#define ASSERT(condition)
static void LoadOperands(MacroAssembler *masm, FloatingPointHelper::Destination destination, Register heap_number_map, Register scratch1, Register scratch2, Label *not_number)
friend class BlockConstPoolScope
WriteInt32ToHeapNumberStub(Register the_int, Register the_heap_number, Register scratch)
const int kPointerSizeLog2
static const int kInstanceSizeOffset
static void GenerateCompareFlatAsciiStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
#define kFirstCalleeSavedDoubleReg
static const char * GetName(TypeInfo type_info)
const uint32_t kStringRepresentationMask
MemOperand GlobalObjectOperand()
static DwVfpRegister from_code(int code)
static const int kEntrySize
const intptr_t kObjectAlignmentMask
static const int kGlobalContextOffset
MemOperand ContextOperand(Register context, int index)
static const int kContextOffset
const uint32_t kAsciiDataHintTag
const uint32_t kShortExternalStringMask
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< String > name, Register scratch0)
static const int kLastSubjectOffset
static const int kZeroHash
void Generate(MacroAssembler *masm)
static const int kHashFieldOffset
static const int kLastCaptureCountOffset
static const int kFirstOffset
static const int kMinLength
StringDictionaryLookupStub(LookupMode mode)
const int kNumDoubleCalleeSaved
const uint32_t kNotStringTag
static const int kParentOffset
static const int kNonMantissaBitsInTopWord
static const int kLiteralsOffset
DwVfpRegister DoubleRegister
static const int kArgumentsObjectSizeStrict
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static int ActivationFrameAlignment()
static const int kLengthOffset
static const int kCapacityOffset
const uint32_t kIsSymbolMask
static const int kExponentShift
const intptr_t kFailureTagMask
static const int kValueOffset
const int kFailureTagSize
static void GenerateFlatAsciiStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
#define kLastCalleeSavedDoubleReg
static const int kIrregexpCaptureCountOffset
static const int kInputOffset
static bool IsBitOp(Value op)
const uint32_t kIsIndirectStringMask
void Generate(MacroAssembler *masm)
static void LoadSmis(MacroAssembler *masm, Destination destination, Register scratch1, Register scratch2)
virtual bool IsPregenerated()
static void CallCCodeForDoubleOperation(MacroAssembler *masm, Token::Value op, Register heap_number_result, Register scratch)
static const int kPcLoadDelta
static const int kStringWrapperSafeForDefaultValueOf
const RegList kCalleeSaved
const uint32_t kAsciiDataHintMask
static void ConvertNumberToInt32(MacroAssembler *masm, Register object, Register dst, Register heap_number_map, Register scratch1, Register scratch2, Register scratch3, DwVfpRegister double_scratch, Label *not_int32)
void Generate(MacroAssembler *masm)
static const int kPropertiesOffset
static void PatchBranchIntoNop(MacroAssembler *masm, int pos)
static const int kMinLength
const uint32_t kShortExternalStringTag
static void GenerateHashAddCharacter(MacroAssembler *masm, Register hash, Register character)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const int kHeaderSize
static const int kNextFunctionLinkOffset
void Generate(MacroAssembler *masm)
static int SizeFor(int length)
static const int kElementsOffset
const uint32_t kStringTag
static bool IsEqualityOp(Value op)
static const int kOffsetOffset
void Generate(MacroAssembler *masm)
static const int kLengthOffset
static int SizeFor(int length)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
virtual void Generate(MacroAssembler *masm)
static const int kLastMatchOverhead
static const int kHeaderSize
const intptr_t kPointerAlignmentMask
void Generate(MacroAssembler *masm)
void GenerateCall(MacroAssembler *masm, ExternalReference function)
static const int kMapOffset
static const int kMantissaBitsInTopWord
bool is(Register reg) const
static const int kSkipEvacuationSlotsRecordingMask
const uint32_t kIsNotStringMask
const int kNumCalleeSaved
static void GenerateFixedRegStubsAheadOfTime()
const uint32_t kSlicedNotConsMask
static const int kLengthOffset
static void ConvertIntToDouble(MacroAssembler *masm, Register int_scratch, Destination destination, DwVfpRegister double_dst, Register dst1, Register dst2, Register scratch2, SwVfpRegister single_scratch)
void Generate(MacroAssembler *masm)
void Generate(MacroAssembler *masm)
static const int kSecondOffset
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kCallerFPOffset
static const int kArgumentsLengthIndex
MemOperand FieldMemOperand(Register object, int offset)
const intptr_t kObjectAlignment
static const int kContextOffset
static const int kFunctionOffset
static const int kFirstCaptureOffset
static const uint32_t kHashBitMask
static const uint32_t kSignMask
static const int kLastInputOffset
static const int kHeaderSize
void GenerateBody(MacroAssembler *masm, bool is_construct)
static const int kDataAsciiCodeOffset
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
static void GenerateAheadOfTime()
static const int kArgumentsCalleeIndex
static const int kIsUndetectable
static const int kHeaderSize
void Generate(MacroAssembler *masm)
void GenerateFast(MacroAssembler *masm)
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kInstrSize
static const int kDataTagOffset
static const int kPrototypeOffset
static void GenerateFixedRegStubsAheadOfTime()
static const int kElementsStartOffset
static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler *masm, Register c1, Register c2, Register scratch1, Register scratch2, Register scratch3, Register scratch4, Register scratch5, Label *not_found)
static const int kMaxLength
static const int kValueOffset
Operand SmiUntagOperand(Register object)
bool Contains(Type type) const
const uint32_t kSymbolTag
const uint32_t kAsciiStringTag
static const int kConstructStubOffset
static const int kExponentBits
static const int kHashShift
static const int kSharedFunctionInfoOffset
void Generate(MacroAssembler *masm)
static const int kBitField2Offset
void Generate(MacroAssembler *masm)
CEntryStub(int result_size, SaveFPRegsMode save_doubles = kDontSaveFPRegs)
void check(i::Vector<const char> string)
static const int kExponentOffset
static const int kValueOffset
static const int kDataUC16CodeOffset
void Generate(MacroAssembler *masm)
StoreBufferOverflowStub(SaveFPRegsMode save_fp)
virtual bool IsPregenerated()
static void GenerateHashInit(MacroAssembler *masm, Register hash, Register character)
static bool IsOrderedRelationalCompareOp(Value op)
const uint32_t kStringEncodingMask
static const int kInstanceTypeOffset
static const int kIndexOffset
void Generate(MacroAssembler *masm)
static const int kMantissaOffset
void Generate(MacroAssembler *masm)
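// The StringHelper::GenerateHashInit and GenerateHashAddCharacter
// declarations above emit a Jenkins one-at-a-time style string hash in
// assembly, with a companion GenerateHashGetHash helper doing the final
// mixing. A minimal C++ sketch of the same steps, assuming the hash seed
// is passed in by the caller (the stubs load it from the heap roots):
#include <cstdint>

uint32_t HashSequentialString(const uint8_t* chars, int length,
                              uint32_t seed) {
  uint32_t hash = seed;
  for (int i = 0; i < length; i++) {
    hash += chars[i];    // GenerateHashAddCharacter: hash += character;
    hash += hash << 10;  //                           hash += hash << 10;
    hash ^= hash >> 6;   //                           hash ^= hash >> 6;
  }
  hash += hash << 3;     // final mixing (GenerateHashGetHash)
  hash ^= hash >> 11;
  hash += hash << 15;
  return hash;
}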
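// FieldMemOperand and SmiUntagOperand above encode V8's 32-bit tagging
// scheme: heap object pointers carry a 1 in the low bit, while smis keep a
// 31-bit payload shifted left by one over a 0 tag bit. A sketch of the
// arithmetic, with constants matching the 32-bit build (sketch only, not
// the V8 declarations):
#include <cstdint>

const intptr_t kHeapObjectTag = 1;  // low bit set on tagged heap pointers
const int kSmiTagSize = 1;          // smi payload sits above one tag bit

// FieldMemOperand(object, offset) is MemOperand(object, offset - kHeapObjectTag):
// subtracting the tag turns a tagged pointer plus a field offset into the
// field's untagged address in a single addressing mode.
intptr_t FieldAddress(intptr_t tagged_pointer, int offset) {
  return tagged_pointer + offset - kHeapObjectTag;
}

// SmiUntagOperand is the operand form of an arithmetic shift right by one.
int32_t SmiToInt(int32_t smi) { return smi >> kSmiTagSize; }
int32_t IntToSmi(int32_t value) { return value << kSmiTagSize; }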
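// NegateCondition above relies on the ARM condition-code encoding: the
// 4-bit codes in instruction bits 31:28 come in complementary pairs
// (eq/ne, cs/cc, mi/pl, ...) differing only in the lowest bit, so negation
// is a single XOR. Abbreviated sketch of the idea (enum trimmed to a few
// codes; not the full V8 definition):
#include <cstdint>

enum Condition : uint32_t {
  eq = 0u << 28,   // equal (Z set)
  ne = 1u << 28,   // not equal (Z clear)
  ge = 10u << 28,  // signed greater or equal
  lt = 11u << 28,  // signed less than
  al = 14u << 28   // always; has no complement and must never be negated
};

Condition NegateCondition(Condition cond) {
  // Flipping bit 28 maps each code to its complement (eq <-> ne, ge <-> lt).
  return static_cast<Condition>(cond ^ (1u << 28));
}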
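// The two SizeFor(int length) declarations above compute allocation sizes
// for fixed arrays: a fixed header (map word plus length word) followed by
// one slot per element, pointer-sized for FixedArray and 8 bytes for
// FixedDoubleArray. Sketch with illustrative 32-bit constants:
const int kPointerSize = 4;
const int kDoubleSize = 8;
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map word + length word

int FixedArraySizeFor(int length) {
  return kFixedArrayHeaderSize + length * kPointerSize;
}
int FixedDoubleArraySizeFor(int length) {
  return kFixedArrayHeaderSize + length * kDoubleSize;
}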
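// GenerateNegativeLookup and GeneratePositiveLookup above unroll a string
// dictionary probe sequence inline. The underlying scheme is open
// addressing over a power-of-two table with triangular probe offsets; a
// hedged sketch of the index computation (entry layout omitted, and the
// offset formula is an assumption about the probing the stubs mirror):
#include <cstdint>

uint32_t ProbeIndex(uint32_t hash, uint32_t attempt, uint32_t capacity) {
  uint32_t offset = attempt * (attempt + 1) / 2;  // 0, 1, 3, 6, 10, ...
  return (hash + offset) & (capacity - 1);        // capacity is a power of two
}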