#if defined(V8_TARGET_ARCH_ARM)

#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm,
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm,
                                          Condition cond);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,

static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
                                   Register scratch1, Register scratch2,
                                   Label* not_a_heap_number) {
  __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, scratch2);
  __ b(ne, not_a_heap_number);
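// The heap-number check above is a plain map comparison: the operand's map
// (loaded into scratch1 in a line elided from this excerpt) is compared
// against the HeapNumber map root, and the code branches away if they differ.
// A minimal sketch of the same idea, with the load step shown for
// illustration only:
//
//   __ ldr(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset));
//   __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
//   __ cmp(scratch1, scratch2);
//   __ b(ne, not_a_heap_number);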
  Label check_heap_number, call_builtin;
  __ JumpIfNotSmi(r0, &check_heap_number);
  __ bind(&check_heap_number);
  EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin);
  __ bind(&call_builtin);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->fast_new_closure_total(), 1, r6, r7);
  __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
  Label check_optimized;
  Label install_unoptimized;
  if (FLAG_cache_optimized_code) {
    __ b(ne, &check_optimized);
  __ bind(&install_unoptimized);
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ bind(&check_optimized);
  __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, r6, r7);
  Label install_optimized;
  __ b(eq, &install_optimized);
  __ b(eq, &install_unoptimized);
  __ bind(&install_optimized);
  __ IncrementCounter(counters->fast_new_closure_install_optimized(),
  __ RecordWriteContextSlot(
  __ LoadRoot(r4, Heap::kFalseValueRootIndex);
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
  __ LoadRoot(r1, Heap::kFunctionContextMapRootIndex);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);

  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
  Label after_sentinel;
  __ JumpIfNotSmi(r3, &after_sentinel);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmp(r3, Operand::Zero());
    __ Assert(eq, message);
  __ bind(&after_sentinel);
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
static void GenerateFastCloneShallowArrayCommon(
    MacroAssembler* masm,
  int elements_size = 0;
      : FixedArray::SizeFor(length);
  __ AllocateInNewSpace(size,
  __ add(r2, r0, Operand(JSArray::kSize));

  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case);
  Label double_elements, check_fast_elements;
  __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
  __ b(ne, &check_fast_elements);
  GenerateFastCloneShallowArrayCommon(masm, 0,
  __ bind(&check_fast_elements);
  __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &double_elements);
  GenerateFastCloneShallowArrayCommon(masm, length_,
  __ bind(&double_elements);

  if (FLAG_debug_code) {
    message = "Expected (writable) fixed array";
    expected_map_index = Heap::kFixedArrayMapRootIndex;
    message = "Expected (writable) fixed double array";
    expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
    message = "Expected copy-on-write fixed array";
    expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    __ CompareRoot(r3, expected_map_index);
    __ Assert(eq, message);
  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);

  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case);
  __ b(ne, &slow_case);
  __ add(sp, sp, Operand(4 * kPointerSize));
  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
class ConvertToDoubleStub : public CodeStub {
  ConvertToDoubleStub(Register result_reg_1,
                      Register result_reg_2,
                      Register scratch_reg)
      : result1_(result_reg_1),
        result2_(result_reg_2),
        zeros_(scratch_reg) { }

  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
    return result1_.code() +
           (result2_.code() << 4) +
           (source_.code() << 8) +
           (zeros_.code() << 12);

  void Generate(MacroAssembler* masm);

void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
  Register exponent = result1_;
  Register mantissa = result2_;
  __ cmp(source_, Operand(1));
  __ b(gt, &not_special);
  const uint32_t exponent_word_for_1 =
  __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
  __ bind(&not_special);
  __ CountLeadingZeros(zeros_, source_, mantissa);
  __ add(mantissa, mantissa, Operand(fudge));
          Operand(mantissa, LSL, HeapNumber::kExponentShift));
  __ add(zeros_, zeros_, Operand(1));
  __ mov(source_, Operand(source_, LSL, zeros_));
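// ConvertToDoubleStub builds an IEEE-754 double from an untagged integer
// without using VFP hardware. The target layout, with the high word held in
// the exponent register and the low word in the mantissa register, is:
//
//   high word: [ sign:1 | biased exponent:11 | mantissa bits 51..32 ]
//   low word:  [ mantissa bits 31..0 ]
//
// The fast path above special-cases 0 and 1 (exponent_word_for_1); the
// general path counts leading zeros to normalize the value, derives the
// biased exponent from that count, and shifts the remaining bits into the
// two mantissa words. The exact bias arithmetic sits in lines elided from
// this excerpt.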
    CpuFeatures::Scope scope(VFP2);
    __ mov(scratch1, Operand(r0));
    ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2);
    __ Call(stub1.GetCode());
    __ mov(scratch1, Operand(r1));
    ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2);
    __ Call(stub2.GetCode());

                                  MacroAssembler* masm,
                                  Register heap_number_map,
  LoadNumber(masm, destination,
             r0, d7, r2, r3, heap_number_map, scratch1, scratch2, slow);
  LoadNumber(masm, destination,
             r1, d6, r0, r1, heap_number_map, scratch1, scratch2, slow);

void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
                                     Destination destination,
                                     Register heap_number_map,
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
    CpuFeatures::Scope scope(VFP2);
    CpuFeatures::Scope scope(VFP2);
    __ vmov(dst.high(), scratch1);
    __ vcvt_f64_s32(dst, dst.high());
    __ vmov(dst1, dst2, dst);
    __ mov(scratch1, Operand(object));
    ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
    __ Call(stub.GetCode());
                                     Register heap_number_map,
                                     DwVfpRegister double_scratch,
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  Label not_in_int32_range;
  __ UntagAndJumpIfSmi(dst, object, &done);
  __ cmp(scratch1, heap_number_map);
  __ b(ne, not_number);
  __ ConvertToInt32(object,
                    &not_in_int32_range);
  __ bind(&not_in_int32_range);
  __ EmitOutOfInt32RangeTruncate(dst,

                                    Register int_scratch,
                                    Destination destination,
                                    DwVfpRegister double_dst,
                                    SwVfpRegister single_scratch) {
  ASSERT(!int_scratch.is(scratch2));
  ASSERT(!int_scratch.is(dst1));
  ASSERT(!int_scratch.is(dst2));
    CpuFeatures::Scope scope(VFP2);
    __ vmov(single_scratch, int_scratch);
    __ vcvt_f64_s32(double_dst, single_scratch);
    __ vmov(dst1, dst2, double_dst);

    Label fewer_than_20_useful_bits;
    __ cmp(int_scratch, Operand::Zero());
    __ mov(dst2, int_scratch);
    __ mov(dst1, int_scratch);
    __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);
    __ CountLeadingZeros(dst1, int_scratch, scratch2);
    __ rsb(dst1, dst1, Operand(31));
    __ Bfi(dst2, scratch2, scratch2,
    __ mov(scratch2, Operand(1));
    __ bic(int_scratch, int_scratch, Operand(scratch2, LSL, dst1));
    __ b(mi, &fewer_than_20_useful_bits);
    __ orr(dst2, dst2, Operand(int_scratch, LSR, scratch2));
    __ rsb(scratch2, scratch2, Operand(32));
    __ mov(dst1, Operand(int_scratch, LSL, scratch2));
    __ bind(&fewer_than_20_useful_bits);
    __ mov(scratch2, Operand(int_scratch, LSL, scratch2));
    __ orr(dst2, dst2, scratch2);
    __ mov(dst1, Operand::Zero());
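// The non-VFP branch above performs the same int -> double packing by hand:
// negate if the value was negative (the rsb ... SetCC, mi), count leading
// zeros to find the highest set bit, clear that bit (it becomes the implicit
// leading 1 of the mantissa), insert the biased exponent with Bfi, and then
// split the remaining bits across the two mantissa words. The
// fewer_than_20_useful_bits label covers the case where everything fits in
// the upper mantissa word and the lower word can simply be zeroed.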
                                                  Destination destination,
                                                  DwVfpRegister double_dst,
                                                  DwVfpRegister double_scratch,
                                                  Register heap_number_map,
                                                  SwVfpRegister single_scratch,
  ASSERT(!scratch1.is(object) && !scratch2.is(object));
  ASSERT(!scratch1.is(scratch2));
  ASSERT(!heap_number_map.is(object) &&
         !heap_number_map.is(scratch1) &&
         !heap_number_map.is(scratch2));

  Label done, obj_is_not_smi;
  __ JumpIfNotSmi(object, &obj_is_not_smi);
  __ SmiUntag(scratch1, object);
                         scratch2, single_scratch);

  __ bind(&obj_is_not_smi);
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
    CpuFeatures::Scope scope(VFP2);
    __ vmov(dst1, dst2, double_dst);

  ASSERT(!scratch1.is(object) && !scratch2.is(object));
  __ orr(scratch1, scratch1, Operand(dst2));
  __ cmp(scratch1, Operand::Zero());

                                            Register heap_number_map,
                                            DwVfpRegister double_scratch0,
                                            DwVfpRegister double_scratch1,
  ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
  ASSERT(!scratch1.is(scratch2) &&
         !scratch1.is(scratch3) &&
         !scratch2.is(scratch3));

  __ UntagAndJumpIfSmi(dst, object, &done);
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
    CpuFeatures::Scope scope(VFP2);
  __ orr(dst, scratch2, Operand(dst));
  __ cmp(dst, Operand::Zero());
  __ mov(dst, Operand(dst, LSR, scratch3));
  __ rsb(scratch3, scratch3, Operand(32));
  __ orr(dst, dst, Operand(scratch2, LSL, scratch3));
                    HeapNumber::kExponentShift,
  __ sub(tmp, scratch, Operand(src1, LSR, 31));
  __ cmp(tmp, Operand(30));
  __ b(gt, not_int32);
  __ tst(src2, Operand(0x3fffff));
  __ b(ne, not_int32);
  __ rsb(scratch, scratch, Operand(32));
  __ mov(src2, Operand(1));
  __ mov(src1, Operand(src2, LSL, scratch));
  __ sub(src1, src1, Operand(1));
  __ b(ne, not_int32);

    MacroAssembler* masm,
    Register heap_number_result,
  __ PrepareCallCFunction(0, 2, scratch);
  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP2);
    AllowExternalCallThatCantCauseGC scope(masm);
        ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP2);
  __ mov(r0, Operand(heap_number_result));
  if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
  if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {
  stub1.GetCode()->set_is_pregenerated(true);
  stub2.GetCode()->set_is_pregenerated(true);

void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  __ cmp(the_int_, Operand(0x80000000u));
  __ b(eq, &max_negative_int);
  uint32_t non_smi_exponent =
  __ mov(scratch_, Operand(non_smi_exponent));
  ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
  __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));
  __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));
  __ bind(&max_negative_int);
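// WriteInt32ToHeapNumberStub stores an int32 that does not fit in a Smi into
// an already-allocated HeapNumber, again with integer instructions only:
// apart from the 0x80000000 special case handled at max_negative_int, the
// double is assembled as sign | biased exponent | shifted mantissa (the
// orr/mov pair above builds the two 32-bit words). non_smi_exponent is the
// fixed biased exponent used for such values; its exact derivation from
// HeapNumber::kExponentBias is in the lines elided here.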
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          bool never_nan_nan) {
  Label not_identical;
  Label heap_number, return_equal;
  __ b(ne, &not_identical);

  if (cond != eq || !never_nan_nan) {
    if (cond == lt || cond == gt) {
      __ b(eq, &heap_number);
      if (cond == le || cond == ge) {
        __ b(ne, &return_equal);
        __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
        __ b(ne, &return_equal);

  __ bind(&return_equal);
  } else if (cond == gt) {

  if (cond != eq || !never_nan_nan) {
    if (cond != lt && cond != gt) {
      __ bind(&heap_number);
      __ cmp(r3, Operand(-1));
      __ b(ne, &return_equal);

  __ bind(&not_identical);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
         (lhs.is(r1) && rhs.is(r0)));
  __ JumpIfSmi(rhs, &rhs_is_smi);
    CpuFeatures::Scope scope(VFP2);
    __ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
    __ mov(r7, Operand(lhs));
    ConvertToDoubleStub stub1(r3, r2, r7, r6);
    __ Call(stub1.GetCode());
  __ jmp(lhs_not_nan);

  __ bind(&rhs_is_smi);
    CpuFeatures::Scope scope(VFP2);
    __ SmiToDoubleVFPRegister(rhs, d6, r7, s13);
    __ mov(r7, Operand(rhs));
    ConvertToDoubleStub stub2(r1, r0, r7, r6);
    __ Call(stub2.GetCode());
void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) {
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;
  Label one_is_nan, neither_is_nan;

           HeapNumber::kExponentShift,
  __ cmp(r4, Operand(-1));
  __ b(ne, lhs_not_nan);
  __ b(ne, &one_is_nan);
  __ b(ne, &one_is_nan);

  __ bind(lhs_not_nan);
           HeapNumber::kExponentShift,
  __ cmp(r4, Operand(-1));
  __ b(ne, &neither_is_nan);
  __ b(ne, &one_is_nan);
  __ b(eq, &neither_is_nan);

  __ bind(&one_is_nan);
  if (cond == lt || cond == le) {
  __ bind(&neither_is_nan);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm,
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;

  __ cmp(rhs_mantissa, Operand(lhs_mantissa));
  __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne);
  __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC);

  __ PrepareCallCFunction(0, 2, r5);
  if (masm->use_eabi_hardfloat()) {
    CpuFeatures::Scope scope(VFP2);
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
         (lhs.is(r1) && rhs.is(r0)));
  Label first_non_object;
  __ b(lt, &first_non_object);
  Label return_not_equal;
  __ bind(&return_not_equal);

  __ bind(&first_non_object);
  __ b(eq, &return_not_equal);
  __ b(ge, &return_not_equal);
  __ b(eq, &return_not_equal);
  __ b(ne, &return_not_equal);

static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
         (lhs.is(r1) && rhs.is(r0)));
  __ b(ne, not_heap_numbers);
    CpuFeatures::Scope scope(VFP2);
  __ jmp(both_loaded_as_doubles);

static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
                                         Label* possible_strings,
                                         Label* not_both_strings) {
         (lhs.is(r1) && rhs.is(r0)));
  __ b(ne, &object_test);
  __ b(eq, possible_strings);
  __ b(ge, not_both_strings);
  __ b(eq, possible_strings);

  __ bind(&object_test);
  __ b(lt, not_both_strings);
  __ b(lt, not_both_strings);
  Register number_string_cache = result;
  Register mask = scratch3;
  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  __ sub(mask, mask, Operand(1));

  Isolate* isolate = masm->isolate();
  Label load_result_from_cache;
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);
      CpuFeatures::Scope scope(VFP2);
                  Heap::kHeapNumberMapRootIndex,
      __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
      __ eor(scratch1, scratch1, Operand(scratch2));
      __ and_(scratch1, scratch1, Operand(mask));
                 number_string_cache,
      Register probe = mask;
      __ JumpIfSmi(probe, not_found);
      __ VFPCompareAndSetFlags(d0, d1);
      __ b(ne, not_found);
      __ b(&load_result_from_cache);

  Register scratch = scratch1;
  __ and_(scratch, mask, Operand(object, ASR, 1));
             number_string_cache,
  Register probe = mask;
  __ cmp(object, probe);
  __ b(ne, not_found);

  __ bind(&load_result_from_cache);
  __ IncrementCounter(isolate->counters()->number_to_string_native(),

void NumberToStringStub::Generate(MacroAssembler* masm) {
  __ add(sp, sp, Operand(1 * kPointerSize));
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
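// The number-to-string cache probed above is a fixed array of
// (number, string) pairs. For heap numbers the probe index is taken from the
// double's bit pattern: the two 32-bit halves are loaded with ldm, XOR-ed
// together, and masked with the cache size minus one (hence the sub on mask
// above). Smis appear to use the simpler hash (object ASR 1) & mask. On a
// miss control reaches not_found and eventually the
// kNumberToStringSkipCache runtime call; on a hit the cached string is
// loaded at load_result_from_cache.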
  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  if (include_smi_compare_) {
    Label not_two_smis, smi_done;
    __ JumpIfNotSmi(r2, &not_two_smis);
    __ bind(&not_two_smis);
  } else if (FLAG_debug_code) {
    __ Assert(ne, "CompareStub: unexpected smi operands.");

  EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
  __ and_(r2, lhs_, Operand(rhs_));
  __ JumpIfNotSmi(r2, &not_smis);
  EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_);

  __ bind(&both_loaded_as_doubles);
  Isolate* isolate = masm->isolate();
  __ bind(&lhs_not_nan);
    CpuFeatures::Scope scope(VFP2);
    __ VFPCompareAndSetFlags(d7, d6);
    if (cc_ == lt || cc_ == le) {
  EmitNanCheck(masm, &lhs_not_nan, cc_);
  EmitTwoNonNanDoubleComparison(masm, cc_);
  EmitStrictTwoHeapObjectCompare(masm, lhs_, rhs_);

  Label check_for_symbols;
  Label flat_string_check;
  EmitCheckForTwoHeapNumbers(masm,
                             &both_loaded_as_doubles,
                             &flat_string_check);

  __ bind(&check_for_symbols);
  if (cc_ == eq && !strict_) {
    EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow);

  __ bind(&flat_string_check);
  __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);
  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);
  __ Push(lhs_, rhs_);
  native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  if (cc_ == lt || cc_ == le) {
  const Register map = r9.is(tos_) ? r7 : r9;
  const Register temp = map;

  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
  __ JumpIfSmi(tos_, &patch);

  Label not_heap_number;
  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &not_heap_number);
    CpuFeatures::Scope scope(VFP2);
    __ VFPCompareAndSetFlags(d1, 0.0);
    __ b(ne, &not_zero);
    unsigned int shifted_exponent_mask =
  __ bind(&not_heap_number);

  GenerateTypeTransition(masm);

void ToBooleanStub::CheckOddball(MacroAssembler* masm,
  __ LoadRoot(ip, value);
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ mov(r3, Operand(tos_));
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),

    CpuFeatures::Scope scope(VFP2);
  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = r1;
  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ mov(r0, Operand(ExternalReference::isolate_address()));
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
    CpuFeatures::Scope scope(VFP2);

void UnaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name = NULL;
  stream->Add("UnaryOpStub_%s_%s_%s",

void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateHeapNumberStub(masm);
      GenerateGenericStub(masm);

void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
      GenerateSmiStubSub(masm);
    case Token::BIT_NOT:
      GenerateSmiStubBitNot(masm);

void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow);
  GenerateTypeTransition(masm);

void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateTypeTransition(masm);

void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
  __ JumpIfNotSmi(r0, non_smi);

void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
  __ JumpIfNotSmi(r0, non_smi);

void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
      GenerateHeapNumberStubSub(masm);
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);

void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);

void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateTypeTransition(masm);

void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
  Label slow_allocate_heapnumber, heapnumber_allocated;
  __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ bind(&heapnumber_allocated);

void UnaryOpStub::GenerateHeapNumberCodeBitNot(
    MacroAssembler* masm, Label* slow) {
  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
  __ b(mi, &try_float);

  __ bind(&try_float);
  Label slow_allocate_heapnumber, heapnumber_allocated;
  __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ bind(&heapnumber_allocated);
    CpuFeatures::Scope scope(VFP2);
    WriteInt32ToHeapNumberStub stub(r1, r0, r2);
    __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&impossible);
  if (FLAG_debug_code) {
    __ stop("Incorrect assumption in bit-not stub");

void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
      GenerateGenericStubSub(masm);
    case Token::BIT_NOT:
      GenerateGenericStubBitNot(masm);

void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateGenericCodeFallback(masm);

void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateGenericCodeFallback(masm);
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
    case Token::BIT_NOT:

void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),

void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
    MacroAssembler* masm) {

void BinaryOpStub::Generate(MacroAssembler* masm) {
  AllowStubCallsScope allow_stub_calls(masm, true);
  switch (operands_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateInt32Stub(masm);
      GenerateHeapNumberStub(masm);
      GenerateOddballStub(masm);
      GenerateBothStringStub(masm);
      GenerateStringStub(masm);
      GenerateGeneric(masm);

void BinaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name;
    default: overwrite_name = "UnknownOverwrite"; break;
  stream->Add("BinaryOpStub_%s_%s_%s",
void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;

  Label not_smi_result;
      __ add(right, left, Operand(right), SetCC);
      __ sub(right, right, Operand(left));
      __ sub(right, left, Operand(right), SetCC);
      __ sub(right, left, Operand(right));
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &not_smi_result);
      __ cmp(scratch1, Operand(0));
      __ add(scratch2, right, Operand(left), SetCC);
      __ JumpIfNotPowerOfTwoOrZero(right, scratch1, &not_smi_result);
      __ orr(scratch2, scratch1, Operand(0x80000000u));
      __ tst(left, scratch2);
      __ b(ne, &not_smi_result);
      __ CountLeadingZeros(scratch1, scratch1, scratch2);
      __ rsb(scratch1, scratch1, Operand(31));
      __ mov(right, Operand(left, LSR, scratch1));
      __ orr(scratch1, left, Operand(right));
      __ b(ne, &not_smi_result);
      __ JumpIfNotPowerOfTwoOrZero(right, scratch1, &not_smi_result);
      __ and_(right, left, Operand(scratch1));
      __ orr(right, left, Operand(right));
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &not_smi_result);
      __ SmiTag(right, scratch1);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
      __ b(mi, &not_smi_result);
      __ SmiTag(right, scratch1);
  __ bind(&not_smi_result);
void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
                                       Label* gc_required) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;
  Register scratch3 = r4;

  ASSERT(smi_operands || (not_numbers != NULL));
    __ AssertSmi(right);

  Register heap_number_map = r6;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      Register result = r5;
      GenerateHeapResultAllocation(
          masm, result, heap_number_map, scratch1, scratch2, gc_required);
        CpuFeatures::Scope scope(VFP2);
      if (FLAG_debug_code) {
        __ stop("Unreachable code.");
    case Token::BIT_XOR:
    case Token::BIT_AND:
        __ SmiUntag(r3, left);
        __ SmiUntag(r2, right);

      Label result_not_a_smi;
        case Token::BIT_XOR:
        case Token::BIT_AND:
          __ GetLeastBitsFromInt32(r2, r2, 5);
          __ GetLeastBitsFromInt32(r2, r2, 5);
          __ b(mi, &result_not_a_smi);
          __ b(mi, not_numbers);
          __ GetLeastBitsFromInt32(r2, r2, 5);
      __ b(mi, &result_not_a_smi);

      __ bind(&result_not_a_smi);
      Register result = r5;
        __ AllocateHeapNumber(
            result, scratch1, scratch2, heap_number_map, gc_required);
        GenerateHeapResultAllocation(
            masm, result, heap_number_map, scratch1, scratch2, gc_required);
        CpuFeatures::Scope scope(VFP2);
        if (op_ == Token::SHR) {
        WriteInt32ToHeapNumberStub stub(r2, r0, r3);
        __ TailCallStub(&stub);
void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  Register right = r0;
  Register scratch1 = r7;

  __ orr(scratch1, left, Operand(right));
  __ JumpIfNotSmi(scratch1, &not_smis);
  GenerateSmiSmiOperation(masm);
  if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) {
    GenerateFPOperation(masm, true, use_runtime, gc_required);

void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label not_smis, call_runtime;
    GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS);
    GenerateSmiCode(masm,
                    ALLOW_HEAPNUMBER_RESULTS);
  GenerateTypeTransition(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);

void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  GenerateAddStrings(masm);
  GenerateTypeTransition(masm);

void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Register right = r0;
  __ JumpIfSmi(left, &call_runtime);
  __ b(ge, &call_runtime);
  __ JumpIfSmi(right, &call_runtime);
  __ b(ge, &call_runtime);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);
void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
  Register right = r0;
  Register scratch1 = r7;
  Register scratch2 = r9;
  DwVfpRegister double_scratch = d0;

  Register heap_number_result = no_reg;
  Register heap_number_map = r6;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

  __ orr(scratch1, left, right);
  __ JumpIfNotSmi(scratch1, &skip);
  GenerateSmiSmiOperation(masm);

        CpuFeatures::Scope scope(VFP2);
        Label return_heap_number;
        __ b(ne, &transition);
        __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
        __ b(mi, &return_heap_number);
        __ cmp(scratch1, Operand::Zero());
        __ b(ne, &not_zero);
        __ b(ne, &return_heap_number);
        __ SmiTag(r0, scratch1);

        __ bind(&return_heap_number);
        heap_number_result = r5;
        GenerateHeapResultAllocation(masm,
        __ mov(r0, heap_number_result);

        Label pop_and_call_runtime;
        heap_number_result = r5;
        GenerateHeapResultAllocation(masm,
                                     &pop_and_call_runtime);
            masm, op_, heap_number_result, scratch1);
        if (FLAG_debug_code) {
          __ stop("Unreachable code.");

        __ bind(&pop_and_call_runtime);
        __ b(&call_runtime);

    case Token::BIT_XOR:
    case Token::BIT_AND:
      Label return_heap_number;
      Register scratch3 = r5;
        case Token::BIT_XOR:
        case Token::BIT_AND:
          __ and_(r2, r2, Operand(0x1f));
          __ and_(r2, r2, Operand(0x1f));
          __ b(mi, (result_type_ <= BinaryOpIC::INT32)
                       : &return_heap_number);
          __ b(mi, (result_type_ <= BinaryOpIC::INT32)
          __ and_(r2, r2, Operand(0x1f));
          __ add(scratch1, r2, Operand(0x40000000), SetCC);
          __ b(mi, &return_heap_number);

      __ bind(&return_heap_number);
      heap_number_result = r5;
      GenerateHeapResultAllocation(masm,
        CpuFeatures::Scope scope(VFP2);
        if (op_ != Token::SHR) {
          __ vmov(double_scratch.low(), r2);
          __ vcvt_f64_s32(double_scratch, double_scratch.low());
          __ vmov(double_scratch.low(), r2);
          __ vcvt_f64_u32(double_scratch, double_scratch.low());
        __ mov(r0, heap_number_result);
        WriteInt32ToHeapNumberStub stub(r2, r0, r3);
        __ TailCallStub(&stub);

  if (transition.is_linked() ||
      ((op_ == Token::DIV) && (result_type_ <= BinaryOpIC::INT32))) {
    __ bind(&transition);
    GenerateTypeTransition(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);
void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
    GenerateAddStrings(masm);
  __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(r1, Heap::kNanValueRootIndex);
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(r0, Heap::kNanValueRootIndex);
  GenerateHeapNumberStub(masm);

void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  GenerateFPOperation(masm, false, &call_runtime, &call_runtime);
  __ bind(&call_runtime);
  GenerateCallRuntime(masm);

void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;
  GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
  GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime);

  __ bind(&call_string_add_or_runtime);
    GenerateAddStrings(masm);

  __ bind(&call_runtime);
  GenerateCallRuntime(masm);

void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
  Label left_not_string, call_runtime;
  Register right = r0;
  __ JumpIfSmi(left, &left_not_string);
  __ b(ge, &left_not_string);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime);
  __ b(ge, &call_runtime);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  __ bind(&call_runtime);

void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:

void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                Register heap_number_map,
                                                Label* gc_required) {
  Label skip_allocation, allocated;
      __ JumpIfNotSmi(overwritable_operand, &skip_allocation);
      __ AllocateHeapNumber(
          result, scratch1, scratch2, heap_number_map, gc_required);
      __ bind(&skip_allocation);
      __ mov(result, Operand(overwritable_operand));
      __ bind(&allocated);
    __ AllocateHeapNumber(
        result, scratch1, scratch2, heap_number_map, gc_required);

void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  Label input_not_smi;
  Label invalid_cache;
  const Register scratch0 = r9;
  const Register scratch1 = r7;
  const Register cache_entry = r0;
  const bool tagged = (argument_type_ == TAGGED);
    CpuFeatures::Scope scope(VFP2);
    __ JumpIfNotSmi(r0, &input_not_smi);
    __ IntegerToDoubleConversionWithVFP3(r0, r3, r2);

    __ bind(&input_not_smi);
                Heap::kHeapNumberMapRootIndex,
    __ And(r1, r1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));

  Isolate* isolate = masm->isolate();
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(isolate);
  __ mov(cache_entry, Operand(cache_array));
  int cache_array_index
      = type_ * sizeof(isolate->transcendental_cache()->caches_[0]);
  __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index));
  __ b(eq, &invalid_cache);

  { TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);
    CHECK_EQ(0, elem_in0 - elem_start);

  __ add(cache_entry, cache_entry, Operand(r1, LSL, 2));
  __ b(ne, &calculate);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(
      counters->transcendental_cache_hit(), 1, scratch0, scratch1);

  __ bind(&calculate);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(
      counters->transcendental_cache_miss(), 1, scratch0, scratch1);
    __ bind(&invalid_cache);
    ExternalReference runtime_function =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime_function, 1, 1);

    CpuFeatures::Scope scope(VFP2);
    __ Push(r3, r2, cache_entry);
    GenerateCallCFunction(masm, scratch0);
    __ GetCFunctionDoubleResult(d2);
    __ Pop(r3, r2, cache_entry);
    __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r6, scratch0, scratch1, r5, &no_update);

    __ bind(&invalid_cache);
    __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
    __ CallRuntime(RuntimeFunction(), 1);

    __ bind(&skip_cache);
    GenerateCallCFunction(masm, scratch0);
    __ GetCFunctionDoubleResult(d2);
    __ bind(&no_update);
    __ mov(scratch0, Operand(4 * kPointerSize));
    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);

void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
  Isolate* isolate = masm->isolate();
  __ PrepareCallCFunction(0, 1, scratch);
  if (masm->use_eabi_hardfloat()) {
  AllowExternalCallThatCantCauseGC scope(masm);
      __ CallCFunction(ExternalReference::math_sin_double_function(isolate),
      __ CallCFunction(ExternalReference::math_cos_double_function(isolate),
      __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
      __ CallCFunction(ExternalReference::math_log_double_function(isolate),
      return Runtime::kAbort;

  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
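// The transcendental cache consulted above stores, per slot and per
// operation type, an input double (kept as two 32-bit words) plus a pointer
// to the cached result; the CHECK_EQ block pins that layout down (12 bytes
// per element on ARM) so the scaled indexing in the assembly, partly elided
// here, stays in sync with the C++ struct. The probe index in r1 is derived
// from the input double's bits and masked with kCacheSize - 1; a hit returns
// the cached result, a miss falls through to calculate, which calls the C
// library routine via GenerateCallCFunction and, when allocation succeeds,
// writes the new pair back into the cache.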
  CpuFeatures::Scope vfp2_scope(VFP2);
  const Register base = r1;
  const Register exponent = r2;
  const Register heapnumbermap = r5;
  const Register heapnumber = r0;
  const SwVfpRegister single_scratch = s0;
  const Register scratch = r9;
  const Register scratch2 = r7;

  Label call_runtime, done, int_exponent;
    Label base_is_smi, unpack_exponent;
    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ vmov(single_scratch, scratch);
    __ vcvt_f64_s32(double_base, single_scratch);
    __ bind(&unpack_exponent);
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ vldr(double_exponent,
  } else if (exponent_type_ == TAGGED) {
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
    __ vldr(double_exponent,

  if (exponent_type_ != INTEGER) {
    Label int_exponent_convert;
    __ vcvt_u32_f64(single_scratch, double_exponent);
    __ vcvt_f64_u32(double_scratch, single_scratch);
    __ VFPCompareAndSetFlags(double_scratch, double_exponent);
    __ b(eq, &int_exponent_convert);

    Label not_plus_half;
    __ vmov(double_scratch, 0.5, scratch);
    __ VFPCompareAndSetFlags(double_exponent, double_scratch);
    __ b(ne, &not_plus_half);
    __ VFPCompareAndSetFlags(double_base, double_scratch);
    __ vneg(double_result, double_scratch, eq);
    __ vsqrt(double_result, double_scratch);

    __ bind(&not_plus_half);
    __ vmov(double_scratch, -0.5, scratch);
    __ VFPCompareAndSetFlags(double_exponent, double_scratch);
    __ b(ne, &call_runtime);
    __ VFPCompareAndSetFlags(double_base, double_scratch);
    __ vmov(double_result, 1.0, scratch);
    __ vsqrt(double_scratch, double_scratch);
    __ vdiv(double_result, double_result, double_scratch);

      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ SetCallCDoubleArguments(double_base, double_exponent);
          ExternalReference::power_double_double_function(masm->isolate()),
      __ GetCFunctionDoubleResult(double_result);

    __ bind(&int_exponent_convert);
    __ vcvt_u32_f64(single_scratch, double_exponent);
    __ vmov(scratch, single_scratch);

  __ bind(&int_exponent);
  if (exponent_type_ == INTEGER) {
    __ mov(scratch, exponent);
    __ mov(exponent, scratch);
  __ vmov(double_scratch, double_base);
  __ vmov(double_result, 1.0, scratch2);
  __ cmp(scratch, Operand(0));

  __ bind(&while_true);
  __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
  __ vmul(double_result, double_result, double_scratch, cs);
  __ vmul(double_scratch, double_scratch, double_scratch, ne);
  __ b(ne, &while_true);

  __ cmp(exponent, Operand(0));
  __ vmov(double_scratch, 1.0, scratch);
  __ vdiv(double_result, double_scratch, double_result);
  __ VFPCompareAndSetFlags(double_result, 0.0);
  __ vmov(single_scratch, exponent);
  __ vcvt_f64_s32(double_exponent, single_scratch);

  Counters* counters = masm->isolate()->counters();
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ vstr(double_result,
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);

      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ SetCallCDoubleArguments(double_base, double_exponent);
          ExternalReference::power_double_double_function(masm->isolate()),
      __ GetCFunctionDoubleResult(double_result);
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
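// The integer-exponent loop above (while_true) is exponentiation by
// squaring: each iteration shifts the exponent right by one (ASR with
// SetCC), multiplies double_result by the current power when the shifted-out
// bit was set (the cs-conditional vmul), and squares double_scratch for the
// next round (the ne-conditional vmul) until the exponent is exhausted.
// Negative exponents are handled afterwards by taking the reciprocal. In
// scalar form, for a non-negative exponent n:
//
//   double result = 1.0, power = base;
//   while (n != 0) {
//     if (n & 1) result *= power;
//     power *= power;
//     n >>= 1;
//   }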
bool CEntryStub::NeedsImmovableCode() {
  return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&

void CodeStub::GenerateStubsAheadOfTime() {

void CodeStub::GenerateFPStubs() {
  Handle<Code> code = save_doubles.GetCode();
  code->set_is_pregenerated(true);
  stub.GetCode()->set_is_pregenerated(true);
  code->GetIsolate()->set_fp_stubs_generated(true);

  Handle<Code> code = stub.GetCode();
  code->set_is_pregenerated(true);

void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool always_allocate) {
  Isolate* isolate = masm->isolate();
    __ PrepareCallCFunction(1, 0, r1);
    __ CallCFunction(ExternalReference::perform_gc_function(isolate),

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(isolate);
  if (always_allocate) {
    __ mov(r0, Operand(scope_depth));
    __ add(r1, r1, Operand(1));

#if defined(V8_HOST_ARCH_ARM)
  int frame_alignment_mask = frame_alignment - 1;
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);

  __ mov(r2, Operand(ExternalReference::isolate_address()));
  masm->add(lr, pc, Operand(4));

  if (always_allocate) {
    __ mov(r2, Operand(scope_depth));
    __ sub(r3, r3, Operand(1));

  Label failure_returned;
  __ add(r2, r0, Operand(1));
  __ b(eq, &failure_returned);
  __ LeaveExitFrame(save_doubles_, r4);

  __ bind(&failure_returned);
  __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ b(eq, throw_out_of_memory_exception);
  __ mov(r3, Operand(isolate->factory()->the_hole_value()));
  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ cmp(r0, Operand(isolate->factory()->termination_exception()));
  __ b(eq, throw_termination_exception);
  __ jmp(throw_normal_exception);

  __ sub(r6, r6, Operand(kPointerSize));
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles_);

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
  __ mov(r0, Operand(reinterpret_cast<int32_t>(failure)));
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ bind(&throw_out_of_memory_exception);
  Isolate* isolate = masm->isolate();
  ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
  __ mov(r2, Operand(external_caught));
  __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(r0);

  __ bind(&throw_normal_exception);
  Label invoke, handler_entry, exit;
    CpuFeatures::Scope scope(VFP2);

  Isolate* isolate = masm->isolate();
  __ mov(r8, Operand(-1));
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));

  Label non_outermost_js;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
  __ cmp(r6, Operand::Zero());
  __ b(ne, &non_outermost_js);
  __ bind(&non_outermost_js);

  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
  __ mov(r5, Operand(isolate->factory()->the_hole_value()));
  __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
    __ mov(ip, Operand(construct_entry));
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
    __ mov(ip, Operand(entry));

  Label non_outermost_js_2;
  __ b(ne, &non_outermost_js_2);
  __ mov(r6, Operand::Zero());
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
  __ bind(&non_outermost_js_2);
         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));

  if (FLAG_debug_code) {
    CpuFeatures::Scope scope(VFP2);
  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
  ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());

  const Register object = r0;
  const Register function = r1;
  const Register prototype = r4;
  const Register inline_site = r9;
  const Register scratch = r2;

  Label slow, loop, is_instance, is_not_instance, not_js_object;

  if (!HasArgsInRegisters()) {
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  if (!HasCallSiteInlineCheck()) {
    __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
    __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
    __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
    __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
    ASSERT(HasArgsInRegisters());
    __ LoadFromSafepointRegisterSlot(scratch, r4);
    __ sub(inline_site, lr, scratch);
    __ GetRelocatedValueLocation(inline_site, scratch);

  Register scratch2 = map;
  __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
  __ cmp(scratch, Operand(prototype));
  __ b(eq, &is_instance);
  __ cmp(scratch, scratch2);
  __ b(eq, &is_not_instance);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
    __ GetRelocatedValueLocation(inline_site, scratch);
    if (!ReturnTrueFalseObject()) {
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
    __ GetRelocatedValueLocation(inline_site, scratch);
    if (!ReturnTrueFalseObject()) {
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  __ JumpIfSmi(function, &slow);
  __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
  __ b(ne, &object_not_null);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&object_not_null);
  __ JumpIfNotSmi(object, &object_not_null_or_smi);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  __ bind(&object_not_null_or_smi);
  __ IsObjectJSStringType(object, scratch, &slow);
  __ Ret(HasArgsInRegisters() ? 0 : 2);

  if (!ReturnTrueFalseObject()) {
    if (HasArgsInRegisters()) {
    __ cmp(r0, Operand::Zero());
    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
    __ Ret(HasArgsInRegisters() ? 0 : 2);
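// The heart of the instanceof check is the loop between the loop,
// is_instance and is_not_instance labels: starting from the object's map,
// the code repeatedly loads the next prototype and compares it against the
// function's prototype (match means a true result) and against null (end of
// the chain means a false result). Around that loop sit two caches: a
// root-backed (function, map, answer) cache used when the call site is not
// inlined, and a boolean patched directly into the caller's code via
// GetRelocatedValueLocation / kDeltaToLoadBoolResult when it is.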
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  const int kDisplacement =
  __ JumpIfNotSmi(r1, &slow);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);

void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);

void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  Label adaptor_frame, try_allocate;
  __ b(eq, &adaptor_frame);
  __ b(&try_allocate);

  __ bind(&adaptor_frame);
  __ bind(&try_allocate);
  const int kParameterMapHeaderSize =
  const int kNormalOffset =
  const int kAliasedOffset =
  __ cmp(r1, Operand::Zero());
  const int kCalleeOffset = JSObject::kHeaderSize +
  const int kLengthOffset = JSObject::kHeaderSize +

  Label skip_parameter_map;
  __ b(eq, &skip_parameter_map);
  __ LoadRoot(r6, Heap::kNonStrictArgumentsElementsMapRootIndex);
  __ add(r6, r6, Operand(kParameterMapHeaderSize));

  Label parameters_loop, parameters_test;
  __ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
  __ add(r3, r3, Operand(kParameterMapHeaderSize));
  __ jmp(&parameters_test);

  __ bind(&parameters_loop);
  __ bind(&parameters_test);
  __ b(ne, &parameters_loop);

  __ bind(&skip_parameter_map);
  __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);

  Label arguments_loop, arguments_test;
  __ jmp(&arguments_test);
  __ bind(&arguments_loop);
  __ sub(r4, r4, Operand(kPointerSize));
  __ bind(&arguments_test);
  __ b(lt, &arguments_loop);

  __ add(sp, sp, Operand(3 * kPointerSize));
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);

void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Label adaptor_frame, try_allocate, runtime;
  __ b(eq, &adaptor_frame);
  __ b(&try_allocate);

  __ bind(&adaptor_frame);
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ b(eq, &add_arguments_object);
  __ bind(&add_arguments_object);
  __ AllocateInNewSpace(r1,
  __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
  __ sub(r1, r1, Operand(1));

  __ add(sp, sp, Operand(3 * kPointerSize));
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  Label runtime, invoke_regexp;
  Register subject = r4;
  Register regexp_data = r5;
  Register last_match_info_elements = r6;

  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);
  __ mov(r0, Operand(address_of_regexp_stack_memory_size));
  __ cmp(r0, Operand(0));
  __ JumpIfSmi(r0, &runtime);
  if (FLAG_debug_code) {
    __ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
    __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");

  __ add(r2, r2, Operand(2));
  __ JumpIfSmi(subject, &runtime);
  Condition is_string = masm->IsObjectStringType(subject, r0);
  __ JumpIfNotSmi(r0, &runtime);
  __ JumpIfSmi(r0, &runtime);
  __ ldr(last_match_info_elements,
  __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
  __ mov(r9, Operand(0));
  __ b(eq, &seq_string);

  Label cons_string, external_string, check_encoding;
  __ b(lt, &cons_string);
  __ b(eq, &external_string);
  __ jmp(&check_encoding);

  __ bind(&cons_string);
  __ CompareRoot(r0, Heap::kEmptyStringRootIndex);
  __ bind(&check_encoding);
  __ b(ne, &external_string);

  __ bind(&seq_string);
  __ JumpIfSmi(r7, &runtime);
  __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);

  const int kRegExpExecuteArguments = 9;
  const int kParameterRegisters = 4;
  __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
  __ mov(r0, Operand(ExternalReference::isolate_address()));
  __ mov(r0, Operand(1));
  __ mov(r0, Operand(address_of_regexp_stack_memory_address));
  __ mov(r2, Operand(address_of_regexp_stack_memory_size));
  __ mov(r0, Operand(0));
         Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
  __ eor(r3, r3, Operand(1));
  __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
  __ mov(r0, subject);

  DirectCEntryStub stub;
  stub.GenerateCall(masm, r7);
  __ cmp(r0, Operand(1));
  __ mov(r1, Operand(isolate->factory()->the_hole_value()));
  __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ CompareRoot(r0, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ b(eq, &termination_exception);

  __ bind(&termination_exception);
  __ ThrowUncatchable(r0);

  __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
  __ add(sp, sp, Operand(4 * kPointerSize));

  __ add(r1, r1, Operand(2));
  __ mov(r2, subject);
  __ RecordWriteField(last_match_info_elements,
  __ RecordWriteField(last_match_info_elements,

  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate);
  __ mov(r2, Operand(address_of_static_offsets_vector));

  Label next_capture, done;
         last_match_info_elements,
  __ bind(&next_capture);
  __ jmp(&next_capture);

  __ add(sp, sp, Operand(4 * kPointerSize));

  __ bind(&external_string);
  if (FLAG_debug_code) {
    __ Assert(eq, "external string expected, but not found");
  __ jmp(&seq_string);

  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Factory* factory = masm->isolate()->factory();
  __ JumpIfNotSmi(r1, &slowcase);
  __ b(hi, &slowcase);
  __ add(r2, r5, Operand(objects_size));
  __ AllocateInNewSpace(
  __ mov(r4, Operand(factory->empty_fixed_array()));
  __ mov(r2, Operand(factory->fixed_array_map()));
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, Operand(0));
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
static void GenerateRecordCallTarget(MacroAssembler* masm) {
            masm->isolate()->heap()->undefined_value());
            masm->isolate()->heap()->the_hole_value());
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex, ne);

  Label slow, non_function;
  if (ReceiverMightBeImplicit()) {
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
  __ JumpIfSmi(r1, &non_function);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

  ParameterCount actual(argc_);
  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
    __ b(eq, &call_as_function);
    __ InvokeFunction(r1,
    __ bind(&call_as_function);
  __ InvokeFunction(r1,

  if (RecordCallTarget()) {
           masm->isolate()->heap()->undefined_value());
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ b(ne, &non_function);
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);

  __ bind(&non_function);
  __ mov(r0, Operand(argc_));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);

  Label slow, non_function_call;
  __ JumpIfSmi(r1, &non_function_call);
  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  __ b(ne, &non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
void CompareStub::PrintName(StringStream* stream) {
  const char* cc_name;
    case lt: cc_name = "LT"; break;
    case gt: cc_name = "GT"; break;
    case le: cc_name = "LE"; break;
    case ge: cc_name = "GE"; break;
    case eq: cc_name = "EQ"; break;
    case ne: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  bool is_equality = cc_ == eq || cc_ == ne;
  stream->Add("CompareStub_%s", cc_name);
  stream->Add(lhs_.is(r0) ? "_r0" : "_r1");
  stream->Add(rhs_.is(r0) ? "_r0" : "_r1");
  if (strict_ && is_equality) stream->Add("_STRICT");
  if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
  if (!include_number_compare_) stream->Add("_NO_NUMBER");
  if (!include_smi_compare_) stream->Add("_NO_SMI");


int CompareStub::MinorKey() {
  ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 12));
  return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
         | RegisterField::encode(lhs_.is(r0))
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_)
         | IncludeSmiCompareField::encode(include_smi_compare_);
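// A minimal standalone sketch of the BitField-style packing that MinorKey()
// relies on. The field positions and widths below are illustrative, not V8's
// exact layout; the point is that each configuration value gets a disjoint
// shifted mask and the encoded pieces are OR-ed together.
#include <cstdint>

template <class T, int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static T decode(uint32_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
};

// Hypothetical layout: condition bits first, then single-bit flags.
typedef BitFieldSketch<unsigned, 0, 12> ConditionFieldSketch;
typedef BitFieldSketch<bool, 12, 1> RegisterFieldSketch;
typedef BitFieldSketch<bool, 13, 1> StrictFieldSketch;

inline uint32_t MakeMinorKeySketch(unsigned cond, bool lhs_is_r0, bool strict) {
  return ConditionFieldSketch::encode(cond)
       | RegisterFieldSketch::encode(lhs_is_r0)
       | StrictFieldSketch::encode(strict);
}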
  Label got_char_code;
  Label sliced_string;
  __ JumpIfSmi(object_, receiver_not_string_);
  __ b(ne, receiver_not_string_);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);
  __ cmp(ip, Operand(index_));
  __ b(ls, index_out_of_range_);


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");
  __ bind(&index_not_smi_);
              Heap::kHeapNumberMapRootIndex,
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  __ CallRuntime(Runtime::kNumberToSmi, 1);
  __ Move(index_, r0);
  call_helper.AfterCall(masm);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  __ jmp(&got_smi_index_);
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  __ Move(result_, r0);
  call_helper.AfterCall(masm);
  __ Abort("Unexpected fallthrough from CharCodeAt slow case");

  __ b(ne, &slow_case_);
  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ b(eq, &slow_case_);


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");
  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  __ Move(result_, r0);
  call_helper.AfterCall(masm);
  __ Abort("Unexpected fallthrough from CharFromCode slow case");


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
  __ add(count, count, Operand(count), SetCC);
  __ sub(count, count, Operand(1), SetCC);


enum CopyCharactersFlags {
  DEST_ALWAYS_ALIGNED = 2

  bool ascii = (flags & COPY_ASCII) != 0;
  bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
  if (dest_always_aligned && FLAG_debug_code) {
    __ Check(eq, "Destination of copy not aligned.");
  const int kReadAlignment = 4;
  const int kReadAlignmentMask = kReadAlignment - 1;
  __ add(count, count, Operand(count), SetCC);
  __ cmp(count, Operand(8));
  __ add(count, dest, Operand(count));
  Register limit = count;
  __ b(lt, &byte_loop);
  if (!dest_always_aligned) {
    __ and_(scratch4, dest, Operand(kReadAlignmentMask), SetCC);
    __ b(eq, &dest_aligned);
    __ cmp(scratch4, Operand(2));
    __ bind(&dest_aligned);
  __ sub(scratch4, dest, Operand(src));
  __ and_(scratch4, scratch4, Operand(0x03), SetCC);
  __ b(eq, &simple_loop);
  __ mov(scratch4, Operand(scratch4, LSL, 3));
  Register left_shift = scratch4;
  __ and_(src, src, Operand(~3));
  __ rsb(scratch2, left_shift, Operand(32));
  Register right_shift = scratch2;
  __ mov(scratch1, Operand(scratch1, LSR, right_shift));
  __ sub(scratch5, limit, Operand(dest));
  __ orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift));
  __ mov(scratch1, Operand(scratch3, LSR, right_shift));
  __ sub(scratch5, scratch5, Operand(8), SetCC);
  __ add(scratch5, scratch5, Operand(4), SetCC);
  __ cmp(scratch4, Operand(scratch5, LSL, 3), ne);
  __ cmp(scratch5, Operand(2));
  __ bind(&simple_loop);
  __ sub(scratch3, limit, Operand(dest));
  __ cmp(scratch3, Operand(8));
  __ bind(&byte_loop);
  __ cmp(dest, Operand(limit));
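// GenerateCopyCharactersLong above takes a byte loop for short or oddly
// aligned copies and otherwise reads aligned source words, recombining
// neighbouring words with the shift amounts held in left_shift and
// right_shift. A standalone C++ sketch of that recombination, assuming a
// little-endian target (as the ARM stub does); CopyCharsSketch is a
// hypothetical helper, not the stub's register schedule, and like the stub it
// may read a few bytes past src + count but never past the aligned word that
// holds the last needed source byte.
#include <cstddef>
#include <cstdint>
#include <cstring>

inline void CopyCharsSketch(uint8_t* dest, const uint8_t* src, size_t count) {
  size_t offset = reinterpret_cast<uintptr_t>(src) & 3;
  if (offset == 0 || count < 8) {
    // Source already word-aligned, or too short to bother: plain byte loop.
    for (size_t i = 0; i < count; i++) dest[i] = src[i];
    return;
  }
  const uint8_t* aligned = src - offset;                      // word-aligned
  unsigned right_shift = 8u * static_cast<unsigned>(offset);  // 8, 16 or 24
  unsigned left_shift = 32u - right_shift;
  size_t words = count / 4;
  uint32_t low, high;
  std::memcpy(&low, aligned, 4);
  low >>= right_shift;
  for (size_t i = 0; i < words; i++) {
    std::memcpy(&high, aligned + 4 * (i + 1), 4);  // next aligned source word
    uint32_t word = low | (high << left_shift);    // stitch two words together
    std::memcpy(dest + 4 * i, &word, 4);
    low = high >> right_shift;
  }
  // Fewer than four bytes remain; finish them one at a time.
  for (size_t i = 4 * words; i < count; i++) dest[i] = src[i];
}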
  Register scratch = scratch3;
  Label not_array_index;
  __ sub(scratch, c1, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
  __ b(hi, &not_array_index);
  __ sub(scratch, c2, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
  __ b(ls, not_found);
  __ bind(&not_array_index);
  Register hash = scratch1;
  Register chars = c1;
  Register symbol_table = c2;
  Register undefined = scratch4;
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
  Register mask = scratch2;
  __ mov(mask, Operand(mask, ASR, 1));
  __ sub(mask, mask, Operand(1));
  Register first_symbol_table_element = symbol_table;
  __ add(first_symbol_table_element, symbol_table,
  const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  Register candidate = scratch5;
  for (int i = 0; i < kProbes; i++) {
    __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
    __ mov(candidate, hash);
    __ and_(candidate, candidate, Operand(mask));
    __ b(ne, &is_string);
    __ cmp(undefined, candidate);
    __ b(eq, not_found);
    if (FLAG_debug_code) {
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(ip, candidate);
      __ Assert(eq, "oddball in symbol table is not undefined or the hole");
    __ jmp(&next_probe[i]);
    __ bind(&is_string);
    __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
    __ b(ne, &next_probe[i]);
    __ cmp(chars, scratch);
    __ b(eq, &found_in_symbol_table);
    __ bind(&next_probe[i]);
  Register result = candidate;
  __ bind(&found_in_symbol_table);
  __ Move(r0, result);
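// The loop above is open addressing over the symbol table: each candidate
// slot is (hash + probe offset) & mask, and hitting 'undefined' means the
// two-character string is not interned. A standalone sketch of that lookup;
// ProbeTableSketch is hypothetical, the entry layout is simplified, and the
// usual quadratic-style sequence stands in for SymbolTable::GetProbeOffset().
#include <cstddef>
#include <cstdint>

// Returns the slot index of 'key', or -1 once an empty slot proves absence.
// Assumes 'capacity' is a power of two and the table always keeps empty slots.
inline int ProbeTableSketch(const uint32_t* table, size_t capacity,
                            uint32_t key, uint32_t hash) {
  const uint32_t kEmptySketch = 0;  // stand-in for the 'undefined' sentinel
  uint32_t mask = static_cast<uint32_t>(capacity) - 1;
  for (uint32_t i = 0; ; i++) {
    uint32_t candidate = (hash + (i + i * i) / 2) & mask;
    uint32_t entry = table[candidate];
    if (entry == kEmptySketch) return -1;
    if (entry == key) return static_cast<int>(candidate);
  }
}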
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character) {
  __ LoadRoot(hash, Heap::kHashSeedRootIndex);
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character) {
  __ add(hash, hash, Operand(character));
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));

  __ add(hash, hash, Operand(hash, LSL, 3));
  __ eor(hash, hash, Operand(hash, LSR, 11));
  __ add(hash, hash, Operand(hash, LSL, 15));
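// The hash helpers above implement a Jenkins-style one-at-a-time hash: a
// seeded init, per-character mixing (<< 10, >> 6), and a finalization step
// (<< 3, >> 11, << 15) -- exactly the shift amounts in the LSL/LSR operands.
// A standalone sketch; the smi-untagging of the seed and V8's final bit
// masking and zero-hash avoidance are simplified away, and the names are
// hypothetical.
#include <cstddef>
#include <cstdint>
#include <cstring>

inline uint32_t HashAddCharacterSketch(uint32_t hash, uint32_t c) {
  hash += c;
  hash += hash << 10;
  hash ^= hash >> 6;
  return hash;
}

inline uint32_t HashGetHashSketch(uint32_t hash) {
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  return hash;
}

// Usage: hash an ASCII string. Starting from the seed and folding in the
// first character reproduces the GenerateHashInit step above.
inline uint32_t StringHashSketch(const char* s, uint32_t seed) {
  uint32_t hash = seed;
  size_t length = std::strlen(s);
  for (size_t i = 0; i < length; i++) {
    hash = HashAddCharacterSketch(hash, static_cast<uint8_t>(s[i]));
  }
  return HashGetHashSketch(hash);
}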
void SubStringStub::Generate(MacroAssembler* masm) {
  __ JumpIfSmi(r0, &runtime);
  __ b(eq, &return_r0);
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  __ b(eq, &seq_or_external_string);
  __ b(ne, &sliced_string);
  __ CompareRoot(r5, Heap::kEmptyStringRootIndex);
  __ jmp(&underlying_unpacked);
  __ bind(&sliced_string);
  __ jmp(&underlying_unpacked);
  __ bind(&seq_or_external_string);
  __ bind(&underlying_unpacked);
  if (FLAG_string_slices) {
    __ b(lt, &copy_routine);
    Label two_byte_slice, set_slice_header;
    __ b(eq, &two_byte_slice);
    __ AllocateAsciiSlicedString(r0, r2, r6, r7, &runtime);
    __ jmp(&set_slice_header);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(r0, r2, r6, r7, &runtime);
    __ bind(&set_slice_header);
    __ bind(&copy_routine);
  Label two_byte_sequential, sequential_string, allocate_result;
  __ b(eq, &sequential_string);
  __ jmp(&allocate_result);
  __ bind(&sequential_string);
  __ bind(&allocate_result);
  __ b(eq, &two_byte_sequential);
                                       COPY_ASCII | DEST_ALWAYS_ALIGNED);
  __ bind(&two_byte_sequential);
  __ AllocateTwoByteString(r0, r2, r4, r6, r7, &runtime);
  __ bind(&return_r0);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2,
                                                      Register scratch3) {
  Register length = scratch1;
  Label strings_not_equal, check_zero_length;
  __ cmp(length, scratch2);
  __ b(eq, &check_zero_length);
  __ bind(&strings_not_equal);
  Label compare_chars;
  __ bind(&check_zero_length);
  __ cmp(length, Operand(0));
  __ b(ne, &compare_chars);
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm,
                                left, right, length, scratch2, scratch3,
                                &strings_not_equal);


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  Label result_not_equal, compare_lengths;
  __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
  Register length_delta = scratch3;
  Register min_length = scratch1;
  __ cmp(min_length, Operand(0));
  __ b(eq, &compare_lengths);
  GenerateAsciiCharsCompareLoop(masm,
                                left, right, min_length, scratch2, scratch4,
  __ bind(&compare_lengths);
  __ mov(r0, Operand(length_delta), SetCC);
  __ bind(&result_not_equal);


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Label* chars_not_equal) {
  __ SmiUntag(length);
  __ add(scratch1, length,
  __ add(left, left, Operand(scratch1));
  __ add(right, right, Operand(scratch1));
  __ rsb(length, length, Operand::Zero());
  Register index = length;
  __ cmp(scratch1, scratch2);
  __ b(ne, chars_not_equal);
  __ add(index, index, Operand(1), SetCC);
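// GenerateAsciiCharsCompareLoop above walks both strings with a single
// negative index that counts up toward zero, so each iteration needs only one
// flag-setting add. The same shape in plain C++ (a sketch; the stub receives
// smi-tagged lengths and raw in-heap character pointers, and the name below
// is hypothetical):
#include <cstddef>

inline bool AsciiCharsEqualSketch(const char* left, const char* right,
                                  size_t length) {
  const char* left_end = left + length;
  const char* right_end = right + length;
  for (std::ptrdiff_t index = -static_cast<std::ptrdiff_t>(length);
       index != 0;
       index++) {
    if (left_end[index] != right_end[index]) return false;
  }
  return true;
}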
void StringCompareStub::Generate(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ b(ne, &not_same);
  __ IncrementCounter(counters->string_compare_native(), 1, r1, r2);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
  __ IncrementCounter(counters->string_compare_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Counters* counters = masm->isolate()->counters();
  __ JumpIfEitherSmi(r0, r1, &call_runtime);
  __ b(ne, &call_runtime);
    GenerateConvertArgument(
        masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin);
    builtin_id = Builtins::STRING_ADD_RIGHT;
    ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
    GenerateConvertArgument(
        masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin);
    builtin_id = Builtins::STRING_ADD_LEFT;
  Label strings_not_empty;
  __ b(ne, &strings_not_empty);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&strings_not_empty);
  Label string_add_flat_result, longer_than_two;
  __ cmp(r6, Operand(2));
  __ b(ne, &longer_than_two);
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7,
  Label make_two_character_string;
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&make_two_character_string);
  __ mov(r6, Operand(2));
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&longer_than_two);
  __ b(lt, &string_add_flat_result);
  __ b(hs, &call_runtime);
  Label non_ascii, allocated, ascii_data;
  __ b(eq, &non_ascii);
  __ bind(&ascii_data);
  __ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
  __ bind(&allocated);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&non_ascii);
  __ b(ne, &ascii_data);
  __ b(eq, &ascii_data);
  __ AllocateTwoByteConsString(r7, r6, r4, r5, &call_runtime);
  Label first_prepared, second_prepared;
  __ bind(&string_add_flat_result);
  __ b(ne, &call_runtime);
  __ b(eq, &first_prepared);
  __ b(ne, &call_runtime);
  __ bind(&first_prepared);
  __ b(eq, &second_prepared);
  __ b(ne, &call_runtime);
  __ bind(&second_prepared);
  Label non_ascii_string_add_flat_result;
  __ b(eq, &non_ascii_string_add_flat_result);
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&non_ascii_string_add_flat_result);
  __ AllocateTwoByteString(r0, r6, r4, r5, r9, &call_runtime);
  __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
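// StringAddStub::Generate above dispatches between several fast paths before
// giving up to the runtime: a cache lookup for two-character results, a flat
// copy for short results, and a ConsString ("rope") node for longer ones. A
// conceptual sketch of that dispatch; the threshold parameters stand in for
// ConsString::kMinLength and String::kMaxLength, whose values live in V8's
// headers, and the ordering mirrors the branches above.
enum StringAddPathSketch {
  kTwoCharacterResultSketch,
  kFlatCopySketch,
  kConsStringSketch,
  kRuntimeSketch
};

inline StringAddPathSketch ChooseStringAddPathSketch(int left_length,
                                                     int right_length,
                                                     int min_cons_length,
                                                     int max_length) {
  int sum = left_length + right_length;
  if (sum == 2) return kTwoCharacterResultSketch;      // try the cached symbol
  if (sum < min_cons_length) return kFlatCopySketch;   // copy both sequences
  if (sum >= max_length) return kRuntimeSketch;        // too long for the stub
  return kConsStringSketch;  // allocate a cons cell pointing at both halves
}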
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ bind(&not_string);
  __ mov(arg, scratch1);
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CompareObjectType(
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  __ JumpIfNotSmi(r2, &miss);
  if (GetCondition() == eq) {


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  Label unordered, maybe_undefined1, maybe_undefined2;
  __ JumpIfSmi(r2, &generic_stub);
  __ b(ne, &maybe_undefined1);
  __ b(ne, &maybe_undefined2);
    CpuFeatures::Scope scope(VFP2);
    __ VFPCompareAndSetFlags(d0, d1);
    __ b(vs, &unordered);
  __ bind(&unordered);
  __ bind(&generic_stub);
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
  __ bind(&maybe_undefined1);
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(ne, &maybe_undefined2);
  __ bind(&maybe_undefined2);
    __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
    __ b(eq, &unordered);


void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  Register right = r0;
  __ JumpIfEitherSmi(left, right, &miss);
  __ and_(tmp1, tmp1, Operand(tmp2));
  __ cmp(left, right);


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  Register right = r0;
  __ JumpIfEitherSmi(left, right, &miss);
  __ orr(tmp3, tmp1, tmp2);
  __ cmp(left, right);
  __ and_(tmp3, tmp1, Operand(tmp2));
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(
      tmp1, tmp2, tmp3, tmp4, &runtime);
        masm, left, right, tmp1, tmp2, tmp3);
        masm, left, right, tmp1, tmp2, tmp3, tmp4);
  __ Push(left, right);
  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  __ JumpIfSmi(r2, &miss);


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  __ JumpIfSmi(r2, &miss);
  __ cmp(r2, Operand(known_map_));
  __ cmp(r3, Operand(known_map_));


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
  __ CallExternalReference(miss, 3);
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    ExternalReference function) {
  __ mov(r2, Operand(function));
  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
                     RelocInfo::CODE_TARGET));
            masm->SizeOfCodeGeneratedSince(&start));
void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register receiver,
                                                        Register properties,
                                                        Handle<String> name,
                                                        Register scratch0) {
  for (int i = 0; i < kInlinedProbes; i++) {
    Register index = scratch0;
    __ sub(index, index, Operand(1));
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
    __ add(index, index, Operand(index, LSL, 1));
    Register entity_name = scratch0;
    Register tmp = properties;
    __ add(tmp, properties, Operand(index, LSL, 1));
    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kInlinedProbes - 1) {
      __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ cmp(entity_name, tmp);
      __ b(eq, &the_hole);
      __ ldrb(entity_name,
  const int spill_mask =
  __ mov(r1, Operand(Handle<String>(name)));
  __ cmp(r0, Operand(0));


void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register scratch1,
                                                        Register scratch2) {
  ASSERT(!elements.is(scratch1));
  ASSERT(!elements.is(scratch2));
  ASSERT(!name.is(scratch1));
  ASSERT(!name.is(scratch2));
  __ AssertString(name);
  __ sub(scratch1, scratch1, Operand(1));
  for (int i = 0; i < kInlinedProbes; i++) {
    ASSERT(StringDictionary::GetProbeOffset(i) <
    __ add(scratch2, scratch2, Operand(
    __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
    __ add(scratch2, elements, Operand(scratch2, LSL, 2));
    __ cmp(name, Operand(ip));
  const int spill_mask =
      ~(scratch1.bit() | scratch2.bit());
    __ Move(r0, elements);
    __ Move(r0, elements);
  __ cmp(r0, Operand(0));
  __ mov(scratch2, Operand(r2));

  Register result = r0;
  Register dictionary = r0;
  Register index = r2;
  Register undefined = r5;
  Register entry_key = r6;
  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
  __ sub(mask, mask, Operand(1));
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    ASSERT(StringDictionary::GetProbeOffset(i) <
      __ add(index, hash, Operand(
      __ mov(index, Operand(hash));
    __ add(index, index, Operand(index, LSL, 1));
    __ add(index, dictionary, Operand(index, LSL, 2));
    __ cmp(entry_key, Operand(undefined));
    __ b(eq, &not_in_dictionary);
    __ cmp(entry_key, Operand(key));
    __ b(eq, &in_dictionary);
    __ b(eq, &maybe_in_dictionary);
  __ bind(&maybe_in_dictionary);
  __ mov(result, Operand::Zero());
  __ bind(&in_dictionary);
  __ mov(result, Operand(1));
  __ bind(&not_in_dictionary);
  __ mov(result, Operand::Zero());
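// The probe loop above is the dictionary flavour of the same open addressing:
// each entry occupies three backing-array slots (key, value, details), hence
// the "index + index * 2" scaling, and the stub gives up after a fixed probe
// budget instead of looping forever. A standalone sketch (entry layout, probe
// offsets and the probe budget are simplified; all names are hypothetical):
#include <cstddef>
#include <cstdint>

enum DictionaryLookupResultSketch {
  kNotInDictionarySketch,
  kInDictionarySketch,
  kMaybeInDictionarySketch  // budget exhausted: defer to the generic lookup
};

const int kEntrySizeSketch = 3;     // key, value, details per entry
const int kTotalProbesSketch = 20;  // illustrative probe budget

inline DictionaryLookupResultSketch DictionaryLookupSketch(
    const uint32_t* elements, size_t capacity, uint32_t key, uint32_t hash) {
  const uint32_t kEmptySketch = 0;  // stand-in for the 'undefined' sentinel
  uint32_t mask = static_cast<uint32_t>(capacity) - 1;
  for (int i = 0; i < kTotalProbesSketch; i++) {
    uint32_t entry = (hash + (i + i * i) / 2) & mask;
    uint32_t index = entry * kEntrySizeSketch;  // index + index * 2
    uint32_t entry_key = elements[index];
    if (entry_key == kEmptySketch) return kNotInDictionarySketch;
    if (entry_key == key) return kInDictionarySketch;
  }
  return kMaybeInDictionarySketch;
}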
struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;

#define REG(Name) { kRegister_ ## Name ## _Code }

static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&
  stub1.GetCode()->set_is_pregenerated(true);
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    stub.GetCode()->set_is_pregenerated(true);
bool CodeStub::CanUseFPRegisters() {


void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;
  __ b(&skip_to_incremental_noncompacting);
  __ b(&skip_to_incremental_compacting);
    __ RememberedSetHelper(object_,
  __ bind(&skip_to_incremental_noncompacting);
  __ bind(&skip_to_incremental_compacting);


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
    Label dont_need_remembered_set;
    __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           &dont_need_remembered_set);
    __ CheckPageFlag(regs_.object(),
                     &dont_need_remembered_set);
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
    __ bind(&dont_need_remembered_set);
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
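// Conceptually, GenerateIncremental above makes two independent decisions for
// every recorded write: the slot goes into the remembered set only when the
// stored value is in new space and the holding object's page tracks outgoing
// pointers, and the incremental marker is informed only while marking is
// active. A much-simplified decision sketch; the boolean inputs stand in for
// the page-flag and map checks the stub performs and are not V8's real API.
inline void RecordWriteDecisionSketch(bool value_in_new_space,
                                      bool object_page_tracks_pointers,
                                      bool incremental_marking_active,
                                      bool* add_to_remembered_set,
                                      bool* inform_incremental_marker) {
  // Old-to-new pointers must be remembered so a scavenge can find them.
  *add_to_remembered_set =
      value_in_new_space && object_page_tracks_pointers;
  // During incremental marking, the marker must see the newly written value
  // so it does not stay unvisited behind an already-marked object.
  *inform_incremental_marker = incremental_marking_active;
}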
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  Register address =
      r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  ASSERT(!address.is(regs_.object()));
  __ Move(address, regs_.address());
  __ Move(r0, regs_.object());
  __ Move(r1, address);
  __ mov(r2, Operand(ExternalReference::isolate_address()));
  AllowExternalCallThatCantCauseGC scope(masm);
        ExternalReference::incremental_evacuation_record_write_function(
        ExternalReference::incremental_marking_record_write_function(
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label need_incremental;
  Label need_incremental_pop_scratch;
  __ ldr(regs_.scratch1(),
  __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
  __ str(regs_.scratch1(),
  __ b(mi, &need_incremental);
  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
  __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
  Label ensure_not_white;
  __ CheckPageFlag(regs_.scratch0(),
  __ CheckPageFlag(regs_.object(),
  __ bind(&ensure_not_white);
  __ Push(regs_.object(), regs_.address());
  __ EnsureNotWhite(regs_.scratch0(),
                    &need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
  __ bind(&need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());
  __ bind(&need_incremental);
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label double_elements;
  Label slow_elements;
  Label fast_elements;
  __ CheckFastElements(r2, r5, &double_elements);
  __ JumpIfSmi(r0, &smi_element);
  __ CheckFastSmiElements(r2, r5, &fast_elements);
  __ bind(&slow_elements);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
  __ bind(&fast_elements);
  __ bind(&smi_element);
  __ bind(&double_elements);
  __ StoreNumberToDoubleElements(r0, r3, r1,

  if (entry_hook_ != NULL) {
    PredictableCodeSizeScope predictable(masm);


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  const int32_t kReturnAddressDistanceFromFunctionStart =
  const int32_t kNumSavedRegs = 3;
  __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));
  __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ and_(sp, sp, Operand(-frame_alignment));
#if defined(V8_HOST_ARCH_ARM)
  __ mov(ip, Operand(reinterpret_cast<int32_t>(&entry_hook_)));
      reinterpret_cast<intptr_t>(EntryHookTrampoline));
  ApiFunction dispatcher(trampoline_address);
  __ mov(ip, Operand(ExternalReference(&dispatcher,
                                       ExternalReference::BUILTIN_CALL,
  if (frame_alignment > kPointerSize) {

#endif  // V8_TARGET_ARCH_ARM