30 #if defined(V8_TARGET_ARCH_X64)
39 #define __ ACCESS_MASM(masm)
43 Label check_heap_number, call_builtin;
45 __ j(not_zero, &check_heap_number, Label::kNear);
48 __ bind(&check_heap_number);
50 Heap::kHeapNumberMapRootIndex);
54 __ bind(&call_builtin);
65 Counters* counters = masm->isolate()->counters();
70 __ IncrementCounter(counters->fast_new_closure_total(), 1);
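// Annotation (not in the original source): judging from the counters and the
// FLAG_cache_optimized_code path below, this is FastNewClosureStub's fast
// path: it allocates a JSFunction for a SharedFunctionInfo in new space and,
// when optimized-code caching is on, tries to install cached optimized code
// before falling back to Runtime::kNewClosure.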
88 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
89 __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
90 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
101 Label check_optimized;
102 Label install_unoptimized;
103 if (FLAG_cache_optimized_code) {
107 __ j(not_zero, &check_optimized, Label::kNear);
109 __ bind(&install_unoptimized);
119 __ bind(&check_optimized);
121 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
126 Label install_optimized;
130 __ j(equal, &install_optimized);
153 __ bind(&install_optimized);
154 __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
172 __ RecordWriteContextSlot(
184 __ jmp(&install_unoptimized);
192 __ PushRoot(Heap::kFalseValueRootIndex);
194 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
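// Slow path: the false value pushed above is presumably the pretenure flag,
// completing the three arguments (shared info, context, pretenure) that the
// Runtime::kNewClosure tail call above declares.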
224 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
225 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
235 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
266 Label after_sentinel;
267 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
268 if (FLAG_debug_code) {
269 const char* message = "Expected 0 as a Smi sentinel";
270 __ cmpq(rcx, Immediate(0));
276 __ bind(&after_sentinel);
288 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
289 for (int i = 0; i < slots_; i++) {
299 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
303 static void GenerateFastCloneShallowArrayCommon(
304 MacroAssembler* masm,
314 int elements_size = 0;
318 : FixedArray::SizeFor(length);
338 __ lea(rdx, Operand(rax, JSArray::kSize));
354 while (i < elements_size) {
359 ASSERT(i == elements_size);
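// The unrolled loop above appears to copy the boilerplate's elements in
// kPointerSize-sized chunks; the ASSERT checks the copy covered exactly
// elements_size bytes.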
378 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
384 Factory* factory = masm->isolate()->factory();
386 Label double_elements, check_fast_elements;
389 factory->fixed_cow_array_map());
391 GenerateFastCloneShallowArrayCommon(masm, 0,
395 __ bind(&check_fast_elements);
397 factory->fixed_array_map());
399 GenerateFastCloneShallowArrayCommon(masm, length_,
403 __ bind(&double_elements);
408 if (FLAG_debug_code) {
412 message = "Expected (writable) fixed array";
413 expected_map_index = Heap::kFixedArrayMapRootIndex;
415 message = "Expected (writable) fixed double array";
416 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
419 message = "Expected copy-on-write fixed array";
420 expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
430 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
434 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
454 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
474 __ ret(4 * kPointerSize);
477 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
487 const Register argument = rax;
488 const Register map = rdx;
491 __ movq(argument, Operand(rsp, 1 * kPointerSize));
495 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
498 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
499 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
502 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
507 __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
509 if (!tos_.is(argument)) {
510 __ movq(tos_, argument);
512 __ ret(1 * kPointerSize);
516 __ JumpIfSmi(argument, &patch, Label::kNear);
526 Label not_undetectable;
527 __ j(zero, &not_undetectable, Label::kNear);
529 __ ret(1 * kPointerSize);
530 __ bind(&not_undetectable);
538 __ j(below, &not_js_object, Label::kNear);
540 if (!tos_.is(argument)) {
543 __ ret(1 * kPointerSize);
544 __ bind(&not_js_object);
553 __ ret(1 * kPointerSize);
554 __ bind(&not_string);
559 Label not_heap_number, false_result;
560 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
564 __ j(zero, &false_result, Label::kNear);
566 if (!tos_.is(argument)) {
569 __ ret(1 * kPointerSize);
570 __ bind(&false_result);
572 __ ret(1 * kPointerSize);
573 __ bind(&not_heap_number);
577 GenerateTypeTransition(masm);
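// Annotation: the checks above implement ES5 ToBoolean for the input types
// this stub was specialized for: undefined, null, false, the Smi 0, the empty
// string, undetectable objects, and heap numbers equal to 0 or NaN produce
// false; everything else produces true. Unhandled input types patch the stub
// via GenerateTypeTransition.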
582 __ PushCallerSaved(save_doubles_);
583 const int argument_count = 1;
584 __ PrepareCallCFunction(argument_count);
586 __ LoadAddress(rcx, ExternalReference::isolate_address());
588 __ LoadAddress(rdi, ExternalReference::isolate_address());
591 AllowExternalCallThatCantCauseGC scope(masm);
593 ExternalReference::store_buffer_overflow_function(masm->isolate()),
595 __ PopCallerSaved(save_doubles_);
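// StoreBufferOverflowStub preserves all caller-saved registers (and, with
// save_doubles_, the XMM registers) around the C call to the store-buffer
// overflow handler, so generated code can call it from anywhere without
// setting up a frame.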
600 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
604 const Register argument = rax;
607 Label different_value;
608 __ CompareRoot(argument, value);
613 } else if (!tos_.is(argument)) {
618 __ ret(1 * kPointerSize);
619 __ bind(&different_value);
624 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
631 __ TailCallExternalReference(
632 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
638 class FloatingPointHelper : public AllStatic {
644 static void LoadSSE2SmiOperands(MacroAssembler* masm);
645 static void LoadSSE2NumberOperands(MacroAssembler* masm);
646 static void LoadSSE2UnknownOperands(MacroAssembler* masm,
651 static void LoadAsIntegers(MacroAssembler* masm,
652 Label* operand_conversion_failure,
653 Register heap_number_map);
656 static void LoadNumbersAsIntegers(MacroAssembler* masm);
668 static void NumbersToSmis(MacroAssembler* masm,
681 void IntegerConvert(MacroAssembler* masm,
689 Register double_exponent = rbx;
690 Register double_value = rdi;
691 Label done, exponent_63_plus;
695 __ xorl(result, result);
696 __ movq(xmm0, double_value);
699 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
703 __ cmpl(double_exponent, Immediate(63));
706 __ cvttsd2siq(result, xmm0);
707 __ jmp(&done, Label::kNear);
709 __ bind(&exponent_63_plus);
711 __ cmpl(double_exponent, Immediate(83));
714 __ j(above, &done, Label::kNear);
721 __ addq(double_value, double_value);
722 __ sbbl(result, result);
724 __ addl(double_value, result);
728 if (result.is(rcx)) {
729 __ xorl(double_value, result);
734 __ shll_cl(double_value);
735 __ movl(result, double_value);
738 __ xorl(result, double_value);
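// IntegerConvert truncates a heap number to int32 (ECMA-262 ToInt32) without
// leaving generated code: exponents below 63 fit cvttsd2siq directly,
// exponents in 63..83 have their low 32 bits reconstructed by shifting the
// mantissa (the shll_cl/xorl sequence above), and anything larger is a
// multiple of 2^32, so the zeroed result register is already correct.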
747 void UnaryOpStub::Generate(MacroAssembler* masm) {
748 switch (operand_type_) {
750 GenerateTypeTransition(masm);
753 GenerateSmiStub(masm);
756 GenerateHeapNumberStub(masm);
759 GenerateGenericStub(masm);
765 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
777 __ TailCallExternalReference(
778 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
783 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
786 GenerateSmiStubSub(masm);
789 GenerateSmiStubBitNot(masm);
797 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
799 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
801 GenerateTypeTransition(masm);
805 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
807 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
809 GenerateTypeTransition(masm);
813 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
816 Label::Distance non_smi_near,
817 Label::Distance slow_near) {
819 __ JumpIfNotSmi(rax, non_smi, non_smi_near);
820 __ SmiNeg(rax, rax, &done, Label::kNear);
821 __ jmp(slow, slow_near);
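// SmiNeg jumps to &done when the negated value is still a valid Smi; it falls
// through to the slow path for 0 and the minimum Smi, whose negations (-0 and
// an out-of-range positive value) are not representable as Smis.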
827 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
829 Label::Distance non_smi_near) {
830 __ JumpIfNotSmi(rax, non_smi, non_smi_near);
837 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
840 GenerateHeapNumberStubSub(masm);
843 GenerateHeapNumberStubBitNot(masm);
851 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
852 Label non_smi, slow, call_builtin;
853 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
855 GenerateHeapNumberCodeSub(masm, &slow);
857 GenerateTypeTransition(masm);
858 __ bind(&call_builtin);
859 GenerateGenericCodeFallback(masm);
863 void UnaryOpStub::GenerateHeapNumberStubBitNot(
864 MacroAssembler* masm) {
866 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
868 GenerateHeapNumberCodeBitNot(masm, &slow);
870 GenerateTypeTransition(masm);
874 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
878 Heap::kHeapNumberMapRootIndex);
889 Label slow_allocate_heapnumber, heapnumber_allocated;
890 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
891 __ jmp(&heapnumber_allocated);
893 __ bind(&slow_allocate_heapnumber);
897 __ CallRuntime(Runtime::kNumberAlloc, 0);
901 __ bind(&heapnumber_allocated);
916 void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
920 Heap::kHeapNumberMapRootIndex);
924 IntegerConvert(masm, rax, rax);
934 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
937 GenerateGenericStubSub(masm);
940 GenerateGenericStubBitNot(masm);
948 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
950 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
952 GenerateHeapNumberCodeSub(masm, &slow);
954 GenerateGenericCodeFallback(masm);
958 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
960 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
962 GenerateHeapNumberCodeBitNot(masm, &slow);
964 GenerateGenericCodeFallback(masm);
968 void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
986 void UnaryOpStub::PrintName(StringStream* stream) {
988 const char* overwrite_name = NULL;
993 stream->Add("UnaryOpStub_%s_%s_%s",
1000 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1015 __ TailCallExternalReference(
1016 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
1023 void BinaryOpStub::Generate(MacroAssembler* masm) {
1026 AllowStubCallsScope allow_stub_calls(masm, true);
1028 switch (operands_type_) {
1030 GenerateTypeTransition(masm);
1033 GenerateSmiStub(masm);
1041 GenerateHeapNumberStub(masm);
1044 GenerateOddballStub(masm);
1047 GenerateBothStringStub(masm);
1050 GenerateStringStub(masm);
1053 GenerateGeneric(masm);
1061 void BinaryOpStub::PrintName(StringStream* stream) {
1063 const char* overwrite_name;
1068 default: overwrite_name = "UnknownOverwrite"; break;
1070 stream->Add("BinaryOpStub_%s_%s_%s",
1077 void BinaryOpStub::GenerateSmiCode(
1078 MacroAssembler* masm,
1080 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
1083 const Register left = rdx;
1084 const Register right = rax;
1088 bool generate_inline_heapnumber_results =
1089 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
1096 Label use_fp_on_smis;
1099 if (op_ != Token::BIT_OR) {
1100 Comment smi_check_comment(masm, "-- Smi check arguments");
1101 __ JumpIfNotBothSmi(left, right, &not_smis);
1105 __ bind(&smi_values);
1107 Comment perform_smi(masm, "-- Perform smi operation");
1111 __ SmiAdd(right, right, left, &use_fp_on_smis);
1115 __ SmiSub(left, left, right, &use_fp_on_smis);
1121 __ SmiMul(right, right, left, &use_fp_on_smis);
1138 case Token::BIT_OR: {
1140 __ SmiOrIfSmis(right, right, left, &not_smis);
1143 case Token::BIT_XOR:
1145 __ SmiXor(right, right, left);
1148 case Token::BIT_AND:
1150 __ SmiAnd(right, right, left);
1154 __ SmiShiftLeft(left, left, right);
1159 __ SmiShiftArithmeticRight(left, left, right);
1164 __ SmiShiftLogicalRight(left, left, right, &use_fp_on_smis);
1175 if (use_fp_on_smis.is_linked()) {
1179 __ bind(&use_fp_on_smis);
1180 if (op_ == Token::DIV || op_ == Token::MOD) {
1186 if (generate_inline_heapnumber_results) {
1187 __ AllocateHeapNumber(rcx, rbx, slow);
1188 Comment perform_float(masm, "-- Perform float operation on smis");
1189 if (op_ == Token::SHR) {
1190 __ SmiToInteger32(left, left);
1191 __ cvtqsi2sd(xmm0, left);
1193 FloatingPointHelper::LoadSSE2SmiOperands(masm);
1216 Comment done_comment(masm, "-- Enter non-smi code");
1217 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx,
1218 &smi_values, &fail);
1219 __ jmp(&smi_values);
1224 void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
1225 Label* allocation_failure,
1226 Label* non_numeric_failure) {
1232 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);
1241 GenerateHeapResultAllocation(masm, allocation_failure);
1248 __ jmp(allocation_failure);
1252 case Token::BIT_AND:
1253 case Token::BIT_XOR:
1257 Label non_smi_shr_result;
1258 Register heap_number_map = r9;
1259 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1260 FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
1263 case Token::BIT_OR: __ orl(rax, rcx); break;
1264 case Token::BIT_AND: __ andl(rax, rcx); break;
1265 case Token::BIT_XOR: __ xorl(rax, rcx); break;
1266 case Token::SAR: __ sarl_cl(rax); break;
1267 case Token::SHL: __ shll_cl(rax); break;
1286 if (op_ == Token::SHR) {
1287 __ bind(&non_smi_shr_result);
1288 Label allocation_failed;
1300 __ AssertRootValue(heap_number_map,
1301 Heap::kHeapNumberMapRootIndex,
1302 "HeapNumberMap register clobbered.");
1309 __ bind(&allocation_failed);
1314 __ jmp(allocation_failure);
1321 if (FLAG_debug_code) {
1322 __ Abort("Unexpected fall-through in "
1323 "BinaryStub::GenerateFloatingPointCode.");
1328 void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
1330 Label left_not_string, call_runtime;
1333 Register left = rdx;
1334 Register right = rax;
1337 __ JumpIfSmi(left, &left_not_string, Label::kNear);
1341 GenerateRegisterArgsPush(masm);
1342 __ TailCallStub(&string_add_left_stub);
1345 __ bind(&left_not_string);
1346 __ JumpIfSmi(right, &call_runtime, Label::kNear);
1351 GenerateRegisterArgsPush(masm);
1352 __ TailCallStub(&string_add_right_stub);
1355 __ bind(&call_runtime);
1359 void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
1360 GenerateRegisterArgsPush(masm);
1380 case Token::BIT_AND:
1383 case Token::BIT_XOR:
1401 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1406 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS);
1410 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1415 GenerateTypeTransition(masm);
1417 if (call_runtime.is_linked()) {
1418 __ bind(&call_runtime);
1419 GenerateCallRuntimeCode(masm);
1424 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1427 GenerateStringAddCode(masm);
1430 GenerateTypeTransition(masm);
1434 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
1442 Register left = rdx;
1443 Register right = rax;
1446 __ JumpIfSmi(left, &call_runtime);
1451 __ JumpIfSmi(right, &call_runtime);
1456 GenerateRegisterArgsPush(masm);
1457 __ TailCallStub(&string_add_stub);
1459 __ bind(&call_runtime);
1460 GenerateTypeTransition(masm);
1464 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1470 GenerateStringAddCode(masm);
1475 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1480 __ LoadRoot(rdx, Heap::kNanValueRootIndex);
1482 __ jmp(&done, Label::kNear);
1484 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1489 __ LoadRoot(rax, Heap::kNanValueRootIndex);
1493 GenerateHeapNumberStub(masm);
1497 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1498 Label gc_required, not_number;
1499 GenerateFloatingPointCode(masm, &gc_required, &not_number);
1501 __ bind(&not_number);
1502 GenerateTypeTransition(masm);
1504 __ bind(&gc_required);
1505 GenerateCallRuntimeCode(masm);
1509 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1510 Label call_runtime, call_string_add_or_runtime;
1512 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1514 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);
1516 __ bind(&call_string_add_or_runtime);
1518 GenerateStringAddCode(masm);
1521 __ bind(&call_runtime);
1522 GenerateCallRuntimeCode(masm);
1526 void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
1527 Label* alloc_failure) {
1528 Label skip_allocation;
1534 __ JumpIfNotSmi(rdx, &skip_allocation);
1537 __ AllocateHeapNumber(rbx, rcx, alloc_failure);
1541 __ bind(&skip_allocation);
1549 __ JumpIfNotSmi(rax, &skip_allocation);
1554 __ AllocateHeapNumber(rbx, rcx, alloc_failure);
1558 __ bind(&skip_allocation);
1565 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
1588 Label runtime_call_clear_stack;
1590 const bool tagged = (argument_type_ == TAGGED);
1592 Label input_not_smi, loaded;
1594 __ movq(rax, Operand(rsp, kPointerSize));
1595 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
1604 __ fld_d(Operand(rsp, 0));
1606 __ jmp(&loaded, Label::kNear);
1608 __ bind(&input_not_smi);
1610 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
1634 __ sar(rdx, Immediate(32));
1639 __ sarl(rdx, Immediate(8));
1640 __ sarl(rcx, Immediate(16));
1641 __ sarl(rax, Immediate(24));
1646 __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
1651 ExternalReference cache_array =
1652 ExternalReference::transcendental_cache_array_address(masm->isolate());
1653 __ movq(rax, cache_array);
1654 int cache_array_index =
1655 type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
1656 __ movq(rax, Operand(rax, cache_array_index));
1660 __ j(zero, &runtime_call_clear_stack);
1664 TranscendentalCache::SubCache::Element test_elem[2];
1665 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
1666 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
1667 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
1668 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
1669 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
1671 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
1672 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
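// Annotation: the CHECK_EQs pin down the SubCache::Element layout this stub
// relies on: 16-byte elements whose first two 32-bit words hold the bits of
// the input double (hashed above to pick the cache index) and whose last word
// is a tagged pointer to the cached result heap number.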
1685 Counters* counters = masm->isolate()->counters();
1686 __ IncrementCounter(counters->transcendental_cache_hit(), 1);
1690 __ ret(kPointerSize);
1696 __ bind(&cache_miss);
1697 __ IncrementCounter(counters->transcendental_cache_miss(), 1);
1700 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
1702 __ AllocateHeapNumber(rax, rdi, &skip_cache);
1711 __ ret(kPointerSize);
1717 __ bind(&skip_cache);
1720 __ fld_d(Operand(rsp, 0));
1722 __ fstp_d(Operand(rsp, 0));
1731 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
1738 __ bind(&runtime_call_clear_stack);
1740 __ bind(&runtime_call);
1741 __ TailCallExternalReference(
1742 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
1744 __ bind(&runtime_call_clear_stack);
1745 __ bind(&runtime_call);
1746 __ AllocateHeapNumber(rax, rdi, &skip_cache);
1751 __ CallRuntime(RuntimeFunction(), 1);
1768 return Runtime::kAbort;
1796 __ cmpl(rdi, Immediate(supported_exponent_limit));
1799 __ cmpl(rdi, Immediate(0x7ff));
1800 Label non_nan_result;
1805 __ subq(rsp, Immediate(kPointerSize));
1806 __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
1807 __ movl(Operand(rsp, 0), Immediate(0x00000000));
1808 __ fld_d(Operand(rsp, 0));
1809 __ addq(rsp, Immediate(kPointerSize));
1812 __ bind(&non_nan_result);
1821 Label no_exceptions;
1825 __ testl(rax, Immediate(5));
1826 __ j(zero, &no_exceptions);
1828 __ bind(&no_exceptions);
1833 Label partial_remainder_loop;
1834 __ bind(&partial_remainder_loop);
1838 __ testl(rax, Immediate(0x400));
1878 void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
1882 Label rax_is_object;
1883 Label rdx_is_object;
1885 __ JumpIfNotSmi(rdx, &rdx_is_object);
1887 __ JumpIfSmi(rax, &rax_is_smi);
1889 __ bind(&rax_is_object);
1890 IntegerConvert(masm, rcx, rax);
1893 __ bind(&rdx_is_object);
1894 IntegerConvert(masm, rdx, rdx);
1895 __ JumpIfNotSmi(rax, &rax_is_object);
1896 __ bind(&rax_is_smi);
1907 void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
1908 Label* conversion_failure,
1909 Register heap_number_map) {
1911 Label arg1_is_object, check_undefined_arg1;
1912 Label arg2_is_object, check_undefined_arg2;
1913 Label load_arg2, done;
1915 __ JumpIfNotSmi(rdx, &arg1_is_object);
1920 __ bind(&check_undefined_arg1);
1921 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1926 __ bind(&arg1_is_object);
1930 IntegerConvert(masm, r8, rdx);
1933 __ bind(&load_arg2);
1935 __ JumpIfNotSmi(rax, &arg2_is_object);
1940 __ bind(&check_undefined_arg2);
1941 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1946 __ bind(&arg2_is_object);
1950 IntegerConvert(masm, rcx, rax);
1956 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
1964 void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
1965 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
1967 __ JumpIfSmi(rdx, &load_smi_rdx);
1970 __ JumpIfSmi(rax, &load_smi_rax);
1971 __ bind(&load_nonsmi_rax);
1975 __ bind(&load_smi_rdx);
1978 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
1980 __ bind(&load_smi_rax);
1988 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
1989 Label* not_numbers) {
1990 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
1992 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
1993 __ JumpIfSmi(rdx, &load_smi_rdx);
1998 __ JumpIfSmi(rax, &load_smi_rax);
2000 __ bind(&load_nonsmi_rax);
2006 __ bind(&load_smi_rdx);
2009 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
2011 __ bind(&load_smi_rax);
2018 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
2025 Label* on_not_smis) {
2026 Register heap_number_map = scratch3;
2027 Register smi_result = scratch1;
2030 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2033 __ JumpIfSmi(first, &first_smi, Label::kNear);
2039 __ cvttsd2siq(smi_result, xmm0);
2042 __ cvtlsi2sd(xmm1, smi_result);
2046 __ Integer32ToSmi(first, smi_result);
2048 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
2049 __ bind(&first_smi);
2050 __ AssertNotSmi(second);
2056 __ cvttsd2siq(smi_result, xmm0);
2057 __ cvtlsi2sd(xmm1, smi_result);
2061 __ Integer32ToSmi(second, smi_result);
2062 if (on_success != NULL) {
2073 const Register exponent = rdx;
2075 const Register exponent = rdi;
2077 const Register base = rax;
2078 const Register scratch = rcx;
2079 const XMMRegister double_result = xmm3;
2080 const XMMRegister double_base = xmm2;
2081 const XMMRegister double_exponent = xmm1;
2082 const XMMRegister double_scratch = xmm4;
2084 Label call_runtime, done, exponent_not_smi, int_exponent;
2087 __ movq(scratch, Immediate(1));
2088 __ cvtlsi2sd(double_result, scratch);
2091 Label base_is_smi, unpack_exponent;
2095 __ movq(base, Operand(rsp, 2 * kPointerSize));
2096 __ movq(exponent, Operand(rsp, 1 * kPointerSize));
2097 __ JumpIfSmi(base, &base_is_smi, Label::kNear);
2099 Heap::kHeapNumberMapRootIndex);
2103 __ jmp(&unpack_exponent, Label::kNear);
2105 __ bind(&base_is_smi);
2106 __ SmiToInteger32(base, base);
2107 __ cvtlsi2sd(double_base, base);
2108 __ bind(&unpack_exponent);
2110 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
2111 __ SmiToInteger32(exponent, exponent);
2112 __ jmp(&int_exponent);
2114 __ bind(&exponent_not_smi);
2116 Heap::kHeapNumberMapRootIndex);
2119 } else if (exponent_type_ == TAGGED) {
2120 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
2121 __ SmiToInteger32(exponent, exponent);
2122 __ jmp(&int_exponent);
2124 __ bind(&exponent_not_smi);
2128 if (exponent_type_ != INTEGER) {
2131 __ cvttsd2si(exponent, double_exponent);
2133 __ cmpl(exponent, Immediate(0x80000000u));
2135 __ cvtlsi2sd(double_scratch, exponent);
2137 __ ucomisd(double_exponent, double_scratch);
2144 Label continue_sqrt, continue_rsqrt, not_plus_half;
2148 __ movq(double_scratch, scratch);
2150 __ ucomisd(double_scratch, double_exponent);
2158 __ movq(double_scratch, scratch);
2159 __ ucomisd(double_scratch, double_base);
2163 __ j(carry, &continue_sqrt, Label::kNear);
2166 __ xorps(double_result, double_result);
2167 __ subsd(double_result, double_scratch);
2170 __ bind(&continue_sqrt);
2172 __ xorps(double_scratch, double_scratch);
2173 __ addsd(double_scratch, double_base);
2174 __ sqrtsd(double_result, double_scratch);
2178 __ bind(&not_plus_half);
2180 __ subsd(double_scratch, double_result);
2182 __ ucomisd(double_scratch, double_exponent);
2190 __ movq(double_scratch, scratch);
2191 __ ucomisd(double_scratch, double_base);
2195 __ j(carry, &continue_rsqrt, Label::kNear);
2198 __ xorps(double_result, double_result);
2201 __ bind(&continue_rsqrt);
2203 __ xorps(double_exponent, double_exponent);
2204 __ addsd(double_exponent, double_base);
2205 __ sqrtsd(double_exponent, double_exponent);
2206 __ divsd(double_result, double_exponent);
2211 Label fast_power_failed;
2212 __ bind(&fast_power);
2216 __ movsd(Operand(rsp, 0), double_exponent);
2217 __ fld_d(Operand(rsp, 0));
2218 __ movsd(Operand(rsp, 0), double_base);
2219 __ fld_d(Operand(rsp, 0));
2238 __ testb(rax, Immediate(0x5F));
2239 __ j(not_zero, &fast_power_failed, Label::kNear);
2240 __ fstp_d(Operand(rsp, 0));
2241 __ movsd(double_result, Operand(rsp, 0));
2245 __ bind(&fast_power_failed);
2248 __ jmp(&call_runtime);
2252 __ bind(&int_exponent);
2253 const XMMRegister double_scratch2 = double_exponent;
2255 __ movq(scratch, exponent);
2256 __ movsd(double_scratch, double_base);
2257 __ movsd(double_scratch2, double_result);
2260 Label no_neg, while_true, while_false;
2261 __ testl(scratch, scratch);
2266 __ j(zero, &while_false, Label::kNear);
2267 __ shrl(scratch, Immediate(1));
2270 __ j(above, &while_true, Label::kNear);
2271 __ movsd(double_result, double_scratch);
2272 __ j(zero, &while_false, Label::kNear);
2274 __ bind(&while_true);
2275 __ shrl(scratch, Immediate(1));
2276 __ mulsd(double_scratch, double_scratch);
2277 __ j(above, &while_true, Label::kNear);
2278 __ mulsd(double_result, double_scratch);
2281 __ bind(&while_false);
2283 __ testl(exponent, exponent);
2285 __ divsd(double_scratch2, double_result);
2286 __ movsd(double_result, double_scratch2);
2289 __ xorps(double_scratch2, double_scratch2);
2290 __ ucomisd(double_scratch2, double_result);
2295 __ cvtlsi2sd(double_exponent, exponent);
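// Annotation: the loop above computes base^|exponent| by exponentiation by
// squaring (double_scratch is squared each iteration and multiplied into
// double_result whenever the current exponent bit is set). A negative exponent
// is then handled by taking the reciprocal, and a zero result falls back to
// the runtime, presumably so that cases like a zero base with a negative
// exponent produce the correctly signed infinity.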
2298 Counters* counters = masm->isolate()->counters();
2301 __ bind(&call_runtime);
2302 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
2307 __ AllocateHeapNumber(rax, rcx, &call_runtime);
2309 __ IncrementCounter(counters->math_pow(), 1);
2310 __ ret(2 * kPointerSize);
2312 __ bind(&call_runtime);
2314 __ movsd(xmm0, double_base);
2317 AllowExternalCallThatCantCauseGC scope(masm);
2318 __ PrepareCallCFunction(2);
2320 ExternalReference::power_double_double_function(masm->isolate()), 2);
2323 __ movsd(double_result, xmm0);
2328 __ IncrementCounter(counters->math_pow(), 1);
2334 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2344 __ JumpIfNotSmi(rdx, &slow);
2364 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
2366 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
2379 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
2381 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
2390 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2394 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
2404 Factory* factory = masm->isolate()->factory();
2406 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
2411 Label adaptor_frame, try_allocate;
2419 __ jmp(&try_allocate, Label::kNear);
2422 __ bind(&adaptor_frame);
2423 __ SmiToInteger64(rcx,
2428 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2437 __ bind(&try_allocate);
2441 const int kParameterMapHeaderSize =
2443 Label no_parameter_map;
2446 __ j(zero, &no_parameter_map, Label::kNear);
2448 __ bind(&no_parameter_map);
2462 Label has_mapped_parameters, copy;
2466 __ j(not_zero, &has_mapped_parameters, Label::kNear);
2470 __ jmp(&copy, Label::kNear);
2473 __ bind(&has_mapped_parameters);
2489 __ movq(rdx, Operand(rsp, 3 * kPointerSize));
2514 Label skip_parameter_map;
2516 __ j(zero, &skip_parameter_map);
2521 __ Integer64PlusConstantToSmi(r9, rbx, 2);
2535 Label parameters_loop, parameters_test;
2540 __ addq(r8, Operand(rsp, 1 * kPointerSize));
2542 __ Move(r11, factory->the_hole_value());
2550 __ jmp(&parameters_test, Label::kNear);
2552 __ bind(&parameters_loop);
2557 kParameterMapHeaderSize),
2564 __ bind(&parameters_test);
2566 __ j(not_zero, &parameters_loop, Label::kNear);
2568 __ bind(&skip_parameter_map);
2574 factory->fixed_array_map());
2577 Label arguments_loop, arguments_test;
2579 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2584 __ jmp(&arguments_test, Label::kNear);
2586 __ bind(&arguments_loop);
2587 __ subq(rdx, Immediate(kPointerSize));
2593 __ addq(r8, Immediate(1));
2595 __ bind(&arguments_test);
2597 __ j(less, &arguments_loop, Label::kNear);
2600 __ ret(3 * kPointerSize);
2606 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
2607 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
2611 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
2626 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
2630 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2633 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2637 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
2644 Label adaptor_frame, try_allocate, runtime;
2651 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
2653 __ jmp(&try_allocate);
2656 __ bind(&adaptor_frame);
2658 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
2662 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2666 Label add_arguments_object;
2667 __ bind(&try_allocate);
2669 __ j(zero, &add_arguments_object, Label::kNear);
2671 __ bind(&add_arguments_object);
2682 __ movq(rdi, Operand(rdi, offset));
2692 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
2703 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2720 __ movq(rbx, Operand(rdx, -1 * kPointerSize));
2722 __ addq(rdi, Immediate(kPointerSize));
2723 __ subq(rdx, Immediate(kPointerSize));
2729 __ ret(3 * kPointerSize);
2733 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
2737 void RegExpExecStub::Generate(MacroAssembler* masm) {
2741 #ifdef V8_INTERPRETED_REGEXP
2742 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2743 #else // V8_INTERPRETED_REGEXP
2752 static const int kLastMatchInfoOffset = 1 * kPointerSize;
2753 static const int kPreviousIndexOffset = 2 * kPointerSize;
2759 Isolate* isolate = masm->isolate();
2760 ExternalReference address_of_regexp_stack_memory_address =
2761 ExternalReference::address_of_regexp_stack_memory_address(isolate);
2762 ExternalReference address_of_regexp_stack_memory_size =
2763 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2769 __ movq(rax, Operand(rsp, kJSRegExpOffset));
2770 __ JumpIfSmi(rax, &runtime);
2775 if (FLAG_debug_code) {
2778 "Unexpected type for RegExp data, FixedArray expected");
2780 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
2791 __ SmiToInteger32(rdx,
2802 __ movq(rdi, Operand(rsp, kSubjectOffset));
2803 __ JumpIfSmi(rdi, &runtime);
2812 __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
2813 __ JumpIfNotSmi(rbx, &runtime);
2820 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
2821 __ JumpIfSmi(rdi, &runtime);
2828 Heap::kFixedArrayMapRootIndex);
2842 Label seq_ascii_string, seq_two_byte_string, check_code;
2843 __ movq(rdi, Operand(rsp, kSubjectOffset));
2854 __ j(zero, &seq_two_byte_string, Label::kNear);
2861 __ j(zero, &seq_ascii_string, Label::kNear);
2870 Label cons_string, external_string, check_encoding;
2876 __ j(less, &cons_string, Label::kNear);
2877 __ j(equal, &external_string);
2890 __ jmp(&check_encoding, Label::kNear);
2892 __ bind(&cons_string);
2894 Heap::kEmptyStringRootIndex);
2900 __ bind(&check_encoding);
2905 __ j(zero, &seq_two_byte_string, Label::kNear);
2911 __ bind(&seq_ascii_string);
2916 __ jmp(&check_code, Label::kNear);
2918 __ bind(&seq_two_byte_string);
2924 __ bind(&check_code);
2928 __ JumpIfSmi(r11, &runtime);
2935 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));
2942 Counters* counters = masm->isolate()->counters();
2943 __ IncrementCounter(counters->regexp_entry_native(), 1);
2946 static const int kRegExpExecuteArguments = 9;
2947 int argument_slots_on_stack =
2948 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
2949 __ EnterApiExitFrame(argument_slots_on_stack);
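// Native RegExp execution: the generated Irregexp code is called like a C
// function with kRegExpExecuteArguments (9) arguments, so an API exit frame
// is set up and the arguments beyond the ABI's register parameters are stored
// into the reserved stack slots below. The per-ABI register definitions
// further down (_WIN64 vs. System V) choose the argument registers.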
2955 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
2959 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
2967 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
2973 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
2981 ExternalReference::address_of_static_offsets_vector(isolate));
2984 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
2991 Register arg2 = rdx;
2992 Register arg1 = rcx;
2994 Register arg4 = rcx;
2995 Register arg3 = rdx;
2996 Register arg2 = rsi;
2997 Register arg1 = rdi;
3013 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
3024 __ j(zero, &setup_two_byte, Label::kNear);
3027 __ jmp(&setup_rest, Label::kNear);
3028 __ bind(&setup_two_byte);
3031 __ bind(&setup_rest);
3044 __ LeaveApiExitFrame();
3049 __ cmpl(rax, Immediate(1));
3052 __ j(equal, &success, Label::kNear);
3061 __ LoadRoot(rax, Heap::kNullValueRootIndex);
3062 __ ret(4 * kPointerSize);
3066 __ movq(rax, Operand(rsp, kJSRegExpOffset));
3068 __ SmiToInteger32(rax,
3075 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
3085 __ movq(rax, Operand(rsp, kSubjectOffset));
3087 __ RecordWriteField(rbx,
3092 __ movq(rax, Operand(rsp, kSubjectOffset));
3094 __ RecordWriteField(rbx,
3102 ExternalReference::address_of_static_offsets_vector(isolate));
3107 Label next_capture, done;
3110 __ bind(&next_capture);
3111 __ subq(rdx, Immediate(1));
3122 __ jmp(&next_capture);
3126 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
3127 __ ret(4 * kPointerSize);
3129 __ bind(&exception);
3134 ExternalReference pending_exception_address(
3135 Isolate::kPendingExceptionAddress, isolate);
3136 Operand pending_exception_operand =
3137 masm->ExternalOperand(pending_exception_address, rbx);
3138 __ movq(rax, pending_exception_operand);
3139 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
3142 __ movq(pending_exception_operand, rdx);
3144 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
3145 Label termination_exception;
3146 __ j(equal, &termination_exception, Label::kNear);
3149 __ bind(&termination_exception);
3150 __ ThrowUncatchable(rax);
3155 __ bind(&external_string);
3158 if (FLAG_debug_code) {
3162 __ Assert(zero, "external string expected, but not found");
3171 __ jmp(&seq_two_byte_string);
3175 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3176 #endif // V8_INTERPRETED_REGEXP
3180 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3181 const int kMaxInlineLength = 100;
3184 __ movq(r8, Operand(rsp, kPointerSize * 3));
3185 __ JumpIfNotSmi(r8, &slowcase);
3187 __ cmpl(rbx, Immediate(kMaxInlineLength));
3222 __ movq(r8, Operand(rsp, kPointerSize * 1));
3224 __ movq(r8, Operand(rsp, kPointerSize * 2));
3226 __ movq(r8, Operand(rsp, kPointerSize * 3));
3241 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
3252 __ subl(rbx, Immediate(1));
3257 __ ret(3 * kPointerSize);
3260 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
3272 Register number_string_cache = result;
3273 Register mask = scratch1;
3274 Register scratch = scratch2;
3277 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
3283 __ shrl(mask, Immediate(1));
3284 __ subq(mask, Immediate(1));
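// The number string cache is a FixedArray of (number, string) pairs, so half
// of its length (computed by the shift above) is the number of entries, and
// entries - 1 serves as the mask used to index it by the number's hash.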
3291 Label load_result_from_cache;
3292 Factory* factory = masm->isolate()->factory();
3293 if (!object_is_smi) {
3294 __ JumpIfSmi(object, &is_smi);
3296 factory->heap_number_map(),
3303 GenerateConvertHashCodeToIndex(masm, scratch, mask);
3305 Register index = scratch;
3306 Register probe = mask;
3312 __ JumpIfSmi(probe, not_found);
3318 __ jmp(&load_result_from_cache);
3322 __ SmiToInteger32(scratch, object);
3323 GenerateConvertHashCodeToIndex(masm, scratch, mask);
3325 Register index = scratch;
3335 __ bind(&load_result_from_cache);
3341 Counters* counters = masm->isolate()->counters();
3342 __ IncrementCounter(counters->number_to_string_native(), 1);
3346 void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
3349 __ and_(hash, mask);
3358 void NumberToStringStub::Generate(MacroAssembler* masm) {
3361 __ movq(rbx, Operand(rsp, kPointerSize));
3365 __ ret(1 * kPointerSize);
3369 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3373 static int NegativeComparisonResult(Condition cc) {
3384 Label check_unequal_objects, done;
3385 Factory* factory = masm->isolate()->factory();
3388 if (include_smi_compare_) {
3389 Label non_smi, smi_done;
3390 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
3398 } else if (FLAG_debug_code) {
3400 __ JumpIfNotSmi(rdx, &ok);
3401 __ JumpIfNotSmi(rax, &ok);
3402 __ Abort("CompareStub: smi operands");
3413 Label not_identical;
3420 Label check_for_nan;
3421 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
3423 __ Set(rax, NegativeComparisonResult(cc_));
3425 __ bind(&check_for_nan);
3433 if (never_nan_nan_ && (cc_ == equal)) {
3440 factory->heap_number_map());
3441 __ j(equal, &heap_number, Label::kNear);
3450 __ bind(&heap_number);
3466 __ bind(&not_identical);
3485 factory->heap_number_map());
3501 Label first_non_object;
3503 __ j(below, &first_non_object, Label::kNear);
3505 Label return_not_equal;
3507 __ bind(&return_not_equal);
3510 __ bind(&first_non_object);
3513 __ j(equal, &return_not_equal);
3520 __ j(equal, &return_not_equal);
3528 if (include_number_compare_) {
3529 Label non_number_comparison;
3531 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
3546 __ bind(&unordered);
3556 __ bind(&non_number_comparison);
3560 Label check_for_strings;
3571 __ bind(&check_for_strings);
3573 __ JumpIfNotBothSequentialAsciiStrings(
3594 __ Abort("Unexpected fall-through from string comparison");
3597 __ bind(&check_unequal_objects);
3598 if (cc_ == equal && !strict_) {
3602 Label not_both_objects, return_unequal;
3610 __ j(not_zero, &not_both_objects, Label::kNear);
3612 __ j(below, &not_both_objects, Label::kNear);
3614 __ j(below, &not_both_objects, Label::kNear);
3617 __ j(zero, &return_unequal, Label::kNear);
3620 __ j(zero, &return_unequal, Label::kNear);
3624 __ bind(&return_unequal);
3628 __ bind(&not_both_objects);
3639 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
3654 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3658 __ JumpIfSmi(object, label);
3671 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
3676 __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
3680 static void GenerateRecordCallTarget(MacroAssembler* masm) {
3686 Isolate* isolate = masm->isolate();
3687 Label initialize, done;
3695 __ j(equal, &done, Label::kNear);
3697 __ j(equal, &done, Label::kNear);
3702 __ j(equal, &initialize, Label::kNear);
3707 __ jmp(&done, Label::kNear);
3710 __ bind(&initialize);
3721 Isolate* isolate = masm->isolate();
3722 Label slow, non_function;
3727 if (ReceiverMightBeImplicit()) {
3731 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
3733 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
3738 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
3743 __ JumpIfSmi(rdi, &non_function);
3748 if (RecordCallTarget()) {
3749 GenerateRecordCallTarget(masm);
3753 ParameterCount actual(argc_);
3755 if (ReceiverMightBeImplicit()) {
3756 Label call_as_function;
3757 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
3758 __ j(equal, &call_as_function);
3759 __ InvokeFunction(rdi,
3764 __ bind(&call_as_function);
3766 __ InvokeFunction(rdi,
3774 if (RecordCallTarget()) {
3787 __ Set(rax, argc_ + 1);
3790 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
3792 Handle<Code> adaptor =
3793 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3794 __ jmp(adaptor, RelocInfo::CODE_TARGET);
3799 __ bind(&non_function);
3800 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
3804 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
3805 Handle<Code> adaptor =
3806 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
3807 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3815 Label slow, non_function_call;
3818 __ JumpIfSmi(rdi, &non_function_call);
3823 if (RecordCallTarget()) {
3824 GenerateRecordCallTarget(masm);
3840 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3843 __ bind(&non_function_call);
3844 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3849 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3850 RelocInfo::CODE_TARGET);
3854 bool CEntryStub::NeedsImmovableCode() {
3861 return result_size_ == 1;
3868 void CodeStub::GenerateStubsAheadOfTime() {
3876 void CodeStub::GenerateFPStubs() {
3882 stub.GetCode()->set_is_pregenerated(true);
3884 save_doubles.GetCode()->set_is_pregenerated(true);
3888 void CEntryStub::GenerateCore(MacroAssembler* masm,
3889 Label* throw_normal_exception,
3890 Label* throw_termination_exception,
3891 Label* throw_out_of_memory_exception,
3893 bool always_allocate_scope) {
3908 if (FLAG_debug_code) {
3909 __ CheckStackAlignment();
3928 ExternalReference scope_depth =
3929 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
3930 if (always_allocate_scope) {
3931 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3932 __ incl(scope_depth_operand);
3941 if (result_size_ < 2) {
3945 __ LoadAddress(rdx, ExternalReference::isolate_address());
3952 __ LoadAddress(r8, ExternalReference::isolate_address());
3959 __ movq(rdx, ExternalReference::isolate_address());
3964 if (always_allocate_scope) {
3965 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3966 __ decl(scope_depth_operand);
3970 Label failure_returned;
3974 if (result_size_ > 1) {
3979 __ movq(rax, Operand(rsp, 6 * kPointerSize));
3980 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
3986 __ j(zero, &failure_returned);
3989 __ LeaveExitFrame(save_doubles_);
3993 __ bind(&failure_returned);
3999 __ j(zero, &retry, Label::kNear);
4004 __ j(equal, throw_out_of_memory_exception);
4007 ExternalReference pending_exception_address(
4008 Isolate::kPendingExceptionAddress, masm->isolate());
4009 Operand pending_exception_operand =
4010 masm->ExternalOperand(pending_exception_address);
4011 __ movq(rax, pending_exception_operand);
4012 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
4013 __ movq(pending_exception_operand, rdx);
4017 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
4018 __ j(equal, throw_termination_exception);
4021 __ jmp(throw_normal_exception);
4042 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
4044 int arg_stack_space = 0;
4046 __ EnterExitFrame(arg_stack_space, save_doubles_);
4060 Label throw_normal_exception;
4061 Label throw_termination_exception;
4062 Label throw_out_of_memory_exception;
4066 &throw_normal_exception,
4067 &throw_termination_exception,
4068 &throw_out_of_memory_exception,
4074 &throw_normal_exception,
4075 &throw_termination_exception,
4076 &throw_out_of_memory_exception,
4084 &throw_normal_exception,
4085 &throw_termination_exception,
4086 &throw_out_of_memory_exception,
4090 __ bind(&throw_out_of_memory_exception);
4092 Isolate* isolate = masm->isolate();
4093 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
4095 __ Set(rax, static_cast<int64_t>(false));
4096 __ Store(external_caught, rax);
4099 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
4102 __ Store(pending_exception, rax);
4105 __ bind(&throw_termination_exception);
4106 __ ThrowUncatchable(rax);
4108 __ bind(&throw_normal_exception);
4114 Label invoke, handler_entry, exit;
4115 Label not_outermost_js, not_outermost_js_2;
4117 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
4123 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4147 __ InitializeSmiConstantRegister();
4148 __ InitializeRootRegister();
4151 Isolate* isolate = masm->isolate();
4154 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
4156 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
4157 __ push(c_entry_fp_operand);
4161 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
4167 __ Store(js_entry_sp, rax);
4170 __ bind(&not_outermost_js);
4177 __ bind(&handler_entry);
4178 handler_offset_ = handler_entry.pos();
4181 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
4183 __ Store(pending_exception, rax);
4190 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
4193 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
4194 __ Store(pending_exception, rax);
4197 __ push(Immediate(0));
4205 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
4209 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
4225 __ bind(&not_outermost_js_2);
4228 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
4229 __ pop(c_entry_fp_operand);
4243 __ addq(rsp, Immediate(2 * kPointerSize));
4266 static const int kOffsetToMapCheckValue = 2;
4267 static const int kOffsetToResultValue = 18;
4272 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
4277 static const unsigned int kWordBeforeResultValue = 0x458B4909;
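// Annotation: these constants appear to encode the offsets and raw x64
// instruction bytes expected at an inlined instanceof call site; the
// FLAG_debug_code Asserts below check them before the stub patches the map
// check and boolean result in place, so they must track any change to the
// inlined code sequence.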
4280 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
4285 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
4286 __ JumpIfSmi(rax, &slow);
4295 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
4300 if (!HasCallSiteInlineCheck()) {
4303 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4305 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
4307 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
4308 __ ret(2 * kPointerSize);
4312 __ TryGetFunctionPrototype(rdx, rbx, &slow, true);
4315 __ JumpIfSmi(rbx, &slow);
4325 if (!HasCallSiteInlineCheck()) {
4326 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4327 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
4332 if (FLAG_debug_code) {
4333 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
4335 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
4345 Label loop, is_instance, is_not_instance;
4349 __ j(equal, &is_instance, Label::kNear);
4353 __ j(equal, &is_not_instance, Label::kNear);
4358 __ bind(&is_instance);
4359 if (!HasCallSiteInlineCheck()) {
4363 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
4366 int true_offset = 0x100 +
4369 ASSERT(true_offset >= 0 && true_offset < 0x100);
4370 __ movl(rax, Immediate(true_offset));
4374 if (FLAG_debug_code) {
4375 __ movl(rax, Immediate(kWordBeforeResultValue));
4377 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
4381 __ ret(2 * kPointerSize + extra_stack_space);
4383 __ bind(&is_not_instance);
4384 if (!HasCallSiteInlineCheck()) {
4389 int false_offset = 0x100 +
4392 ASSERT(false_offset >= 0 && false_offset < 0x100);
4393 __ movl(rax, Immediate(false_offset));
4397 if (FLAG_debug_code) {
4398 __ movl(rax, Immediate(kWordBeforeResultValue));
4400 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4403 __ ret(2 * kPointerSize + extra_stack_space);
4407 if (HasCallSiteInlineCheck()) {
4424 int CompareStub::MinorKey() {
4428 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
4430 return ConditionField::encode(static_cast<unsigned>(cc_))
4431 | RegisterField::encode(false)
4432 | StrictField::encode(strict_)
4433 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
4434 | IncludeNumberCompareField::encode(include_number_compare_)
4435 | IncludeSmiCompareField::encode(include_smi_compare_);
4441 void CompareStub::PrintName(StringStream* stream) {
4443 const char* cc_name;
4445 case less: cc_name = "LT"; break;
4446 case greater: cc_name = "GT"; break;
4449 case equal: cc_name = "EQ"; break;
4451 default: cc_name = "UnknownCondition"; break;
4454 stream->Add("CompareStub_%s", cc_name);
4455 if (strict_ && is_equality) stream->Add("_STRICT");
4456 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4457 if (!include_number_compare_) stream->Add("_NO_NUMBER");
4458 if (!include_smi_compare_) stream->Add("_NO_SMI");
4468 Label got_char_code;
4469 Label sliced_string;
4472 __ JumpIfSmi(object_, receiver_not_string_);
4482 __ JumpIfNotSmi(index_, &index_not_smi_);
4483 __ bind(&got_smi_index_);
4489 __ SmiToInteger32(index_, index_);
4492 masm, object_, index_, result_, &call_runtime_);
4494 __ Integer32ToSmi(result_, result_);
4500 MacroAssembler* masm,
4501 const RuntimeCallHelper& call_helper) {
4502 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
4504 Factory* factory = masm->isolate()->factory();
4506 __ bind(&index_not_smi_);
4509 factory->heap_number_map(),
4512 call_helper.BeforeCall(masm);
4516 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
4520 __ CallRuntime(Runtime::kNumberToSmi, 1);
4522 if (!index_.is(rax)) {
4525 __ movq(index_, rax);
4531 call_helper.AfterCall(masm);
4533 __ JumpIfNotSmi(index_, index_out_of_range_);
4535 __ jmp(&got_smi_index_);
4540 __ bind(&call_runtime_);
4541 call_helper.BeforeCall(masm);
4543 __ Integer32ToSmi(index_, index_);
4545 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
4546 if (!result_.is(rax)) {
4547 __ movq(result_, rax);
4549 call_helper.AfterCall(masm);
4552 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
4561 __ JumpIfNotSmi(code_, &slow_case_);
4565 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
4569 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
4576 MacroAssembler* masm,
4577 const RuntimeCallHelper& call_helper) {
4578 __ Abort("Unexpected fallthrough to CharFromCode slow case");
4580 __ bind(&slow_case_);
4581 call_helper.BeforeCall(masm);
4583 __ CallRuntime(Runtime::kCharFromCode, 1);
4584 if (!result_.is(rax)) {
4585 __ movq(result_, rax);
4587 call_helper.AfterCall(masm);
4590 __ Abort("Unexpected fallthrough from CharFromCode slow case");
4604 MacroAssembler* masm,
4605 const RuntimeCallHelper& call_helper) {
4606 char_code_at_generator_.GenerateSlow(masm, call_helper);
4607 char_from_code_generator_.GenerateSlow(masm, call_helper);
4611 void StringAddStub::Generate(MacroAssembler* masm) {
4612 Label call_runtime, call_builtin;
4616 __ movq(rax, Operand(rsp, 2 * kPointerSize));
4617 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
4621 __ JumpIfSmi(rax, &call_runtime);
4626 __ JumpIfSmi(rdx, &call_runtime);
4634 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
4636 builtin_id = Builtins::STRING_ADD_RIGHT;
4638 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
4639 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
4641 builtin_id = Builtins::STRING_ADD_LEFT;
4649 Label second_not_zero_length, both_not_zero_length;
4652 __ j(not_zero, &second_not_zero_length, Label::kNear);
4654 Counters* counters = masm->isolate()->counters();
4655 __ IncrementCounter(counters->string_add_native(), 1);
4656 __ ret(2 * kPointerSize);
4657 __ bind(&second_not_zero_length);
4660 __ j(not_zero, &both_not_zero_length, Label::kNear);
4663 __ IncrementCounter(counters->string_add_native(), 1);
4664 __ ret(2 * kPointerSize);
4673 Label string_add_flat_result, longer_than_two;
4674 __ bind(&both_not_zero_length);
4695 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
4704 Label make_two_character_string, make_flat_ascii_string;
4707 __ IncrementCounter(counters->string_add_native(), 1);
4708 __ ret(2 * kPointerSize);
4710 __ bind(&make_two_character_string);
4721 __ IncrementCounter(counters->string_add_native(), 1);
4722 __ ret(2 * kPointerSize);
4724 __ bind(&longer_than_two);
4727 __ j(below, &string_add_flat_result);
4740 Label non_ascii, allocated, ascii_data;
4747 __ bind(&ascii_data);
4750 __ bind(&allocated);
4758 __ IncrementCounter(counters->string_add_native(), 1);
4759 __ ret(2 * kPointerSize);
4760 __ bind(&non_ascii);
4786 Label first_prepared, second_prepared;
4787 Label first_is_sequential, second_is_sequential;
4788 __ bind(&string_add_flat_result);
4794 __ j(zero, &first_is_sequential, Label::kNear);
4800 __ jmp(&first_prepared, Label::kNear);
4801 __ bind(&first_is_sequential);
4804 __ bind(&first_prepared);
4815 __ j(zero, &second_is_sequential, Label::kNear);
4821 __ jmp(&second_prepared, Label::kNear);
4822 __ bind(&second_is_sequential);
4825 __ bind(&second_prepared);
4827 Label non_ascii_string_add_flat_result;
4833 __ j(zero, &non_ascii_string_add_flat_result);
4835 __ bind(&make_flat_ascii_string);
4849 __ IncrementCounter(counters->string_add_native(), 1);
4850 __ ret(2 * kPointerSize);
4852 __ bind(&non_ascii_string_add_flat_result);
4866 __ IncrementCounter(counters->string_add_native(), 1);
4867 __ ret(2 * kPointerSize);
4870 __ bind(&call_runtime);
4871 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
4873 if (call_builtin.is_linked()) {
4874 __ bind(&call_builtin);
4880 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
4888 Label not_string, done;
4889 __ JumpIfSmi(arg, &not_string);
4895 __ bind(&not_string);
4904 __ movq(arg, scratch1);
4905 __ movq(Operand(rsp, stack_offset), arg);
4909 __ bind(&not_cached);
4910 __ JumpIfSmi(arg, slow);
4917 __ movq(Operand(rsp, stack_offset), arg);
4940 __ addq(src, Immediate(2));
4941 __ addq(dest, Immediate(2));
4963 __ testl(count, count);
4964 __ j(zero, &done, Label::kNear);
4969 __ addl(count, count);
4974 __ testl(count, Immediate(~7));
4975 __ j(zero, &last_bytes, Label::kNear);
4979 __ shr(count, Immediate(3));
4984 __ and_(count, Immediate(7));
4987 __ bind(&last_bytes);
4988 __ testl(count, count);
4989 __ j(zero, &done, Label::kNear);
5013 Register scratch = scratch3;
5017 Label not_array_index;
5018 __ leal(scratch, Operand(c1, -'0'));
5019 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
5020 __ j(above, &not_array_index, Label::kNear);
5021 __ leal(scratch, Operand(c2, -'0'));
5022 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
5025 __ bind(&not_array_index);
5027 Register hash = scratch1;
5033 Register chars = c1;
5041 Register symbol_table = c2;
5045 Register mask = scratch2;
5046 __ SmiToInteger32(mask,
5050 Register map = scratch4;
5061 static const int kProbes = 4;
5062 Label found_in_symbol_table;
5063 Label next_probe[kProbes];
5064 Register candidate = scratch;
5065 for (int i = 0; i < kProbes; i++) {
5067 __ movl(scratch, hash);
5069 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
5071 __ andl(scratch, mask);
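// Symbol table lookup uses open addressing: each of the kProbes (4) inline
// attempts adds SymbolTable::GetProbeOffset(i) to the string's hash and masks
// by capacity - 1 to get a slot; an undefined slot means the symbol is not
// present, and only after all inline probes fail does the code give up and
// take the runtime path.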
5086 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
5089 if (FLAG_debug_code) {
5092 __ Assert(equal, "oddball in symbol table is not undefined or the hole");
5094 __ jmp(&next_probe[i]);
5096 __ bind(&is_string);
5109 __ JumpIfInstanceTypeIsNotSequentialAscii(
5110 temp, temp, &next_probe[i]);
5114 __ andl(temp, Immediate(0x0000ffff));
5115 __ cmpl(chars, temp);
5116 __ j(equal, &found_in_symbol_table);
5117 __ bind(&next_probe[i]);
5124 Register result = candidate;
5125 __ bind(&found_in_symbol_table);
5126 if (!result.is(rax)) {
5127 __ movq(rax, result);
5137 __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
5138 __ SmiToInteger32(scratch, scratch);
5139 __ addl(scratch, character);
5140 __ movl(hash, scratch);
5141 __ shll(scratch, Immediate(10));
5142 __ addl(hash, scratch);
5144 __ movl(scratch, hash);
5145 __ shrl(scratch, Immediate(6));
5146 __ xorl(hash, scratch);
5155 __ addl(hash, character);
5157 __ movl(scratch, hash);
5158 __ shll(scratch, Immediate(10));
5159 __ addl(hash, scratch);
5161 __ movl(scratch, hash);
5162 __ shrl(scratch, Immediate(6));
5163 __ xorl(hash, scratch);
5171 __ leal(hash, Operand(hash, hash, times_8, 0));
5173 __ movl(scratch, hash);
5174 __ shrl(scratch, Immediate(11));
5175 __ xorl(hash, scratch);
5177 __ movl(scratch, hash);
5178 __ shll(scratch, Immediate(15));
5179 __ addl(hash, scratch);
5184 Label hash_not_zero;
5187 __ bind(&hash_not_zero);
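// GenerateHashInit/AddCharacter/GetHash above implement V8's one-at-a-time
// string hash: per character hash += c, hash += hash << 10, hash ^= hash >> 6,
// finalized with hash += hash << 3, hash ^= hash >> 11, hash += hash << 15.
// A zero result is replaced with a fixed non-zero value so that a zero hash
// field can still mean "hash not yet computed".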
5190 void SubStringStub::Generate(MacroAssembler* masm) {
5202 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;
5205 __ movq(rax, Operand(rsp, kStringOffset));
5215 __ movq(rcx, Operand(rsp, kToOffset));
5216 __ movq(rdx, Operand(rsp, kFromOffset));
5217 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
5221 Label not_original_string;
5223 __ j(below, &not_original_string, Label::kNear);
5227 Counters* counters = masm->isolate()->counters();
5228 __ IncrementCounter(counters->sub_string_native(), 1);
5229 __ ret(kArgumentsSize);
5230 __ bind(&not_original_string);
5239 Label underlying_unpacked, sliced_string, seq_or_external_string;
5244 __ j(zero, &seq_or_external_string, Label::kNear);
5247 __ j(not_zero, &sliced_string, Label::kNear);
5251 Heap::kEmptyStringRootIndex);
5257 __ jmp(&underlying_unpacked, Label::kNear);
5259 __ bind(&sliced_string);
5266 __ jmp(&underlying_unpacked, Label::kNear);
5268 __ bind(&seq_or_external_string);
5272 __ bind(&underlying_unpacked);
5274 if (FLAG_string_slices) {
5284 __ j(
less, ©_routine);
5290 Label two_byte_slice, set_slice_header;
5294 __ j(
zero, &two_byte_slice, Label::kNear);
5295 __ AllocateAsciiSlicedString(
rax,
rbx,
r14, &runtime);
5296 __ jmp(&set_slice_header, Label::kNear);
5297 __ bind(&two_byte_slice);
5298 __ AllocateTwoByteSlicedString(
rax,
rbx,
r14, &runtime);
5299 __ bind(&set_slice_header);
5306 __ IncrementCounter(counters->sub_string_native(), 1);
5307 __ ret(kArgumentsSize);
5309 __ bind(©_routine);
  Label two_byte_sequential, sequential_string;
  __ j(zero, &sequential_string);

  __ bind(&sequential_string);
  __ j(zero, &two_byte_sequential);

  SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
  __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,

  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  __ bind(&two_byte_sequential);

  SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
  __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,

  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  __ TailCallRuntime(Runtime::kSubString, 3, 1);
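// Hedged outline of the fast paths the stub above selects between: return the
// original string when the requested range covers all of it, allocate a
// SlicedString when slices are enabled and the result is long enough, and
// only copy characters otherwise. The minimum slice length is a parameter
// here because the exact SlicedString::kMinLength value is not shown in this
// listing.
enum SubStringStrategySketch {
  kReturnOriginalString,
  kAllocateSlicedString,
  kCopyCharacters
};

static SubStringStrategySketch ChooseSubStringStrategy(int from, int to,
                                                       int string_length,
                                                       bool slices_enabled,
                                                       int min_slice_length) {
  int result_length = to - from;
  if (result_length == string_length) return kReturnOriginalString;
  if (slices_enabled && result_length >= min_slice_length)
    return kAllocateSlicedString;
  return kCopyCharacters;
}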
                                                 Register scratch2) {
  Register length = scratch1;

  Label check_zero_length;
  __ j(equal, &check_zero_length, Label::kNear);

  Label compare_chars;
  __ bind(&check_zero_length);
  __ j(not_zero, &compare_chars, Label::kNear);

  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  __ bind(&strings_not_equal);
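// What the fragment above checks, as a plain C++ sketch: equality of two flat
// ASCII strings requires equal lengths first; a zero length compares equal
// without touching any characters; otherwise the characters are compared one
// byte at a time.
static bool FlatAsciiEqualsSketch(const char* left, int left_length,
                                  const char* right, int right_length) {
  if (left_length != right_length) return false;
  for (int i = 0; i < left_length; i++) {
    if (left[i] != right[i]) return false;
  }
  return true;
}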
                                                 Register scratch4) {
  __ movq(scratch4, scratch1);

  const Register length_difference = scratch4;
  __ j(less, &left_shorter, Label::kNear);
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);

  const Register min_length = scratch1;

  Label compare_lengths;
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal, Label::kNear);

  Label result_greater;
  __ bind(&result_not_equal);
  __ j(greater, &result_greater, Label::kNear);

  __ bind(&result_greater);
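// Sketch of the ordering logic above: compare the first min(len_left,
// len_right) characters; if that prefix matches, the sign of the length
// difference decides the result (the shorter string sorts first).
static int FlatAsciiCompareSketch(const char* left, int left_length,
                                  const char* right, int right_length) {
  int min_length = left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return static_cast<unsigned char>(left[i]) <
             static_cast<unsigned char>(right[i]) ? -1 : 1;
    }
  }
  if (left_length == right_length) return 0;
  return left_length < right_length ? -1 : 1;
}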
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Label* chars_not_equal,
    Label::Distance near_jump) {
  __ SmiToInteger32(length, length);

  Register index = length;

  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
void StringCompareStub::Generate(MacroAssembler* masm) {
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  __ IncrementCounter(counters->string_compare_native(), 1);
  __ addq(rsp, Immediate(2 * kPointerSize));

  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  Label unordered, maybe_undefined1, maybe_undefined2;
  __ j(either_smi, &generic_stub, Label::kNear);

  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));

  __ bind(&unordered);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  __ Cmp(rax, masm->isolate()->factory()->undefined_value());

  __ bind(&maybe_undefined2);
  __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  __ and_(tmp1, tmp2);
  __ j(zero, &miss, Label::kNear);

  __ cmpq(left, right);
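// The single cmpq(left, right) above is sufficient because symbols (interned
// strings) are canonicalized: two symbols hold the same characters exactly
// when they are the same heap object. Illustrative sketch with an opaque
// stand-in type:
struct SymbolSketch;  // stands in for a V8 symbol; illustrative only

static bool SymbolsEqualSketch(const SymbolSketch* left,
                               const SymbolSketch* right) {
  return left == right;  // pointer identity implies (and is implied by) equality
}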
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  Condition cond = masm->CheckEitherSmi(left, right, tmp1);

  __ movq(tmp3, tmp1);

  __ cmpq(left, right);

  __ and_(tmp1, tmp2);
  __ j(zero, &do_compare, Label::kNear);

  __ bind(&do_compare);

  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
      masm, left, right, tmp1, tmp2);

  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);

  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  __ j(either_smi, &miss, Label::kNear);


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  __ j(either_smi, &miss, Label::kNear);
  __ Cmp(rcx, known_map_);
  __ Cmp(rbx, known_map_);


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());

  __ CallExternalReference(miss, 3);
                                                        Register properties,
                                                        Handle<String> name,

  for (int i = 0; i < kInlinedProbes; i++) {
    Register index = r0;
    __ SmiToInteger32(index,
                      FieldOperand(properties, kCapacityOffset));
        Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
    __ lea(index, Operand(index, index, times_2, 0));

    Register entity_name = r0;
    __ movq(entity_name, Operand(properties,

    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());

    __ Cmp(entity_name, Handle<String>(name));

    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &the_hole, Label::kNear);
  }

  __ Push(Handle<Object>(name));
  __ push(Immediate(name->Hash()));
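// Hedged sketch of the inlined negative lookup above: probe a small, fixed
// number of slots for `name`. Hitting undefined terminates the probe sequence
// (the name cannot be in the dictionary), hitting the hole means a deleted
// entry and probing continues, and hitting the name itself (or exhausting the
// inlined probes) means the fast "definitely absent" answer cannot be given
// and the full stub or runtime must be consulted. The quadratic probe offset
// is an assumption, as above.
#include <stdint.h>

enum NegativeLookupSketchResult { kDefinitelyAbsent, kNeedSlowPath };

static NegativeLookupSketchResult NegativeLookupSketch(
    const void* const* keys, uint32_t mask, uint32_t hash, const void* name,
    const void* undefined_sentinel, const void* hole_sentinel,
    int inlined_probes) {
  for (int i = 0; i < inlined_probes; i++) {
    uint32_t index = (hash + ((i + i * i) >> 1)) & mask;
    const void* entry = keys[index];
    if (entry == undefined_sentinel) return kDefinitelyAbsent;  // free slot
    if (entry == name) return kNeedSlowPath;                    // present after all
    if (entry == hole_sentinel) continue;                       // deleted entry
    // Some other key collided here; keep probing.
  }
  return kNeedSlowPath;  // inlined probes exhausted
}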
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));

  __ AssertString(name);

  for (int i = 0; i < kInlinedProbes; i++) {
    __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
    __ lea(r1, Operand(r1, r1, times_2, 0));
  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;
  __ SmiToInteger32(scratch,
                    FieldOperand(dictionary_, kCapacityOffset));

  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    __ movq(scratch, Operand(rsp, 2 * kPointerSize));
    __ addl(scratch, Immediate(StringDictionary::GetProbeOffset(i)));
    __ and_(scratch, Operand(rsp, 0));

    __ lea(index_, Operand(scratch, scratch, times_2, 0));

    __ movq(scratch, Operand(dictionary_,

    __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));

    __ j(zero, &maybe_in_dictionary);
  }

  __ bind(&maybe_in_dictionary);
  __ movq(scratch, Immediate(0));
  __ ret(2 * kPointerSize);

  __ bind(&in_dictionary);
  __ movq(scratch, Immediate(1));
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movq(scratch, Immediate(0));
  __ ret(2 * kPointerSize);
struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;

#define REG(Name) { kRegister_ ## Name ## _Code }

struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {


  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&

  stub1.GetCode()->set_is_pregenerated(true);

  stub2.GetCode()->set_is_pregenerated(true);

  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
    stub.GetCode()->set_is_pregenerated(true);


bool CodeStub::CanUseFPRegisters() {
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  __ RememberedSetHelper(object_,

  __ bind(&skip_to_incremental_noncompacting);

  __ bind(&skip_to_incremental_compacting);


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  Label dont_need_remembered_set;

  __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
  __ JumpIfNotInNewSpace(regs_.scratch0(),
                         &dont_need_remembered_set);

  __ CheckPageFlag(regs_.object(),
                   &dont_need_remembered_set);

  CheckNeedsToInformIncrementalMarker(
      masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ RememberedSetHelper(object_,

  __ bind(&dont_need_remembered_set);

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
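// Hedged sketch of the filter GenerateIncremental applies before doing any
// remembered-set work: a store needs a remembered-set entry only when a
// new-space pointer is written into an object that is not itself in new
// space; everything else skips straight to the incremental-marking check.
// The booleans stand in for the JumpIfNotInNewSpace / CheckPageFlag tests
// above.
static bool NeedsRememberedSetEntrySketch(bool value_is_in_new_space,
                                          bool object_is_in_new_space) {
  return value_is_in_new_space && !object_is_in_new_space;
}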
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
#ifdef _WIN64
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(arg1));
  __ Move(address, regs_.address());
  __ Move(arg1, regs_.object());

  __ Move(arg2, address);

  __ movq(arg2, Operand(address, 0));

  __ LoadAddress(arg3, ExternalReference::isolate_address());
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
      ExternalReference::incremental_evacuation_record_write_function(
      ExternalReference::incremental_marking_record_write_function(

  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label need_incremental;
  Label need_incremental_pop_object;

  __ and_(regs_.scratch0(), regs_.object());
  __ movq(regs_.scratch1(),
          Operand(regs_.scratch0(),
  __ subq(regs_.scratch1(), Immediate(1));
  __ movq(Operand(regs_.scratch0(),

  __ JumpIfBlack(regs_.object(),

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,

  __ movq(regs_.scratch0(), Operand(regs_.address(), 0));

  Label ensure_not_white;
  __ CheckPageFlag(regs_.scratch0(),

  __ CheckPageFlag(regs_.object(),

  __ bind(&ensure_not_white);

  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),
                    &need_incremental_pop_object,
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);
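// Hedged sketch of the per-page throttle at the top of this function: each
// page keeps a write-barrier countdown that the stub decrements on every
// recorded write, and the slow path that talks to the incremental marker is
// assumed to be taken only once the counter goes negative. Field and type
// names below are illustrative, not the real MemoryChunk layout.
#include <stdint.h>

struct PageSketch {
  intptr_t write_barrier_counter;
};

static bool ShouldInformIncrementalMarkerSketch(PageSketch* page) {
  page->write_barrier_counter -= 1;         // subq(scratch1, Immediate(1))
  return page->write_barrier_counter < 0;   // assumed branch condition
}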
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label double_elements;
  Label slow_elements;
  Label fast_elements;

  __ CheckFastElements(rdi, &double_elements);

  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  __ bind(&slow_elements);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&fast_elements);

  __ bind(&smi_element);

  __ bind(&double_elements);
  __ StoreNumberToDoubleElements(rax,
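// Hedged summary of the dispatch above as a decision function: double-element
// arrays take the double store path, Smi values can be stored without a write
// barrier, fast object elements take the write-barrier store, and anything
// else (for example a heap object going into smi-only elements) falls back to
// the runtime, which can transition the elements kind.
enum StoreLiteralElementPathSketch {
  kStoreDoubleElement,
  kStoreSmiElement,
  kStoreObjectElementWithBarrier,
  kRuntimeFallback
};

static StoreLiteralElementPathSketch ChooseStorePathSketch(
    bool has_double_elements, bool has_fast_object_elements,
    bool value_is_smi) {
  if (has_double_elements) return kStoreDoubleElement;
  if (value_is_smi) return kStoreSmiElement;
  if (has_fast_object_elements) return kStoreObjectElementWithBarrier;
  return kRuntimeFallback;
}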
  if (entry_hook_ != NULL) {
    masm->CallStub(&stub);
  }


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
#ifdef _WIN64
  const int kNumSavedRegisters = 1;
#else
  const int kNumSavedRegisters = 3;
#endif

#ifdef _WIN64
  __ lea(rdx, Operand(rsp, kNumSavedRegisters * kPointerSize));
#else
  __ lea(rsi, Operand(rsp, kNumSavedRegisters * kPointerSize));
#endif

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);
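// The two lea instructions above compute the second argument handed to the
// entry hook: the stack location of the return address, i.e. the stack
// pointer as it was before the stub saved its scratch registers. The hook's
// shape below is assumed from the V8 embedder API of this era; the example
// hook is illustrative only.
#include <stdint.h>

typedef void (*FunctionEntryHookSketch)(uintptr_t function,
                                        uintptr_t return_addr_location);

static void ExampleEntryHook(uintptr_t function,
                             uintptr_t return_addr_location) {
  (void)function;
  (void)return_addr_location;  // e.g. record (callee, call site) pairs here
}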
#endif  // V8_TARGET_ARCH_X64