#if defined(V8_TARGET_ARCH_X64)

#define __ ACCESS_MASM(masm)

  Label check_heap_number, call_builtin;
  __ j(not_zero, &check_heap_number, Label::kNear);

  __ bind(&check_heap_number);
                 Heap::kHeapNumberMapRootIndex);

  __ bind(&call_builtin);
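// FastNewClosureStub: the fast path initializes the new closure's fields
// from root constants (empty fixed array, the hole, undefined); when the
// fast path cannot be taken, the stub tail-calls Runtime::kNewClosure.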
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);

  __ PushRoot(Heap::kFalseValueRootIndex);
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
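// FastNewContextStub: allocates a function context and fills every slot
// past the fixed header with undefined; the slow path tail-calls
// Runtime::kNewFunctionContext.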
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {

  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
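// FastNewBlockContextStub: a zero smi is used as a sentinel for the
// function argument, the context slots are pre-filled with the hole, and
// the slow path tail-calls Runtime::kPushBlockContext.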
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmpq(rcx, Immediate(0));

  __ bind(&after_sentinel);

  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {

  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
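// Fast cloning of shallow array literals: the common helper copies the
// JSArray header and then the elements backing store; a copy-on-write
// array can share its elements, so its element copy size is zero.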
static void GenerateFastCloneShallowArrayCommon(
    MacroAssembler* masm,
  int elements_size = 0;
      : FixedArray::SizeFor(length);

  __ lea(rdx, Operand(rax, JSArray::kSize));

  while (i < elements_size) {
  ASSERT(i == elements_size);
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);

  Factory* factory = masm->isolate()->factory();

  Label double_elements, check_fast_elements;
          factory->fixed_cow_array_map());
  GenerateFastCloneShallowArrayCommon(masm, 0,

  __ bind(&check_fast_elements);
          factory->fixed_array_map());
  GenerateFastCloneShallowArrayCommon(masm, length_,

  __ bind(&double_elements);

  if (FLAG_debug_code) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
      message = "Expected (writable) fixed double array";
      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;

  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);

  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);

  __ ret(4 * kPointerSize);

  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
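// ToBooleanStub: converts the value on the stack to a boolean by checking,
// in order, the oddballs (undefined, false, true, null), smis, undetectable
// objects, JS objects, strings, and heap numbers; anything outside the
// recorded types patches the stub via a type transition.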
  const Register argument = rax;
  const Register map = rdx;

  __ movq(argument, Operand(rsp, 1 * kPointerSize));

  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

  __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
  if (!tos_.is(argument)) {
    __ movq(tos_, argument);
  __ ret(1 * kPointerSize);

  __ JumpIfSmi(argument, &patch, Label::kNear);

  Label not_undetectable;
  __ j(zero, &not_undetectable, Label::kNear);
  __ ret(1 * kPointerSize);
  __ bind(&not_undetectable);

  __ j(below, &not_js_object, Label::kNear);
  if (!tos_.is(argument)) {
  __ ret(1 * kPointerSize);
  __ bind(&not_js_object);

  __ ret(1 * kPointerSize);
  __ bind(&not_string);

  Label not_heap_number, false_result;
  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);

  __ j(zero, &false_result, Label::kNear);
  if (!tos_.is(argument)) {
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ ret(1 * kPointerSize);
  __ bind(&not_heap_number);

  GenerateTypeTransition(masm);
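// StoreBufferOverflowStub: preserves all caller-saved registers (and,
// when requested, the XMM registers), then calls the C++ store-buffer
// overflow handler with the isolate as its only argument.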
  __ PushCallerSaved(save_doubles_);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
#ifdef _WIN64
  __ LoadAddress(rcx, ExternalReference::isolate_address());
#else
  __ LoadAddress(rdi, ExternalReference::isolate_address());
#endif

  AllowExternalCallThatCantCauseGC scope(masm);
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
  __ PopCallerSaved(save_doubles_);
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
  const Register argument = rax;

  Label different_value;
  __ CompareRoot(argument, value);
  } else if (!tos_.is(argument)) {
  __ ret(1 * kPointerSize);
  __ bind(&different_value);


void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
class FloatingPointHelper : public AllStatic {
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,

  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);

  static void LoadNumbersAsIntegers(MacroAssembler* masm);

  static void NumbersToSmis(MacroAssembler* masm,
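// IntegerConvert truncates a heap-number double to an untagged 32-bit
// integer. Doubles whose exponent is below 63 fit in a cvttsd2siq; larger
// exponents are handled by shifting the mantissa bits manually, and
// exponents above 83 leave only zero bits in the low 32 bits of the result.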
void IntegerConvert(MacroAssembler* masm,
  Register double_exponent = rbx;
  Register double_value = rdi;
  Label done, exponent_63_plus;

  __ xorl(result, result);
  __ movq(xmm0, double_value);

  __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));

  __ cmpl(double_exponent, Immediate(63));

  __ cvttsd2siq(result, xmm0);
  __ jmp(&done, Label::kNear);

  __ bind(&exponent_63_plus);

  __ cmpl(double_exponent, Immediate(83));

  __ j(above, &done, Label::kNear);

  __ addq(double_value, double_value);
  __ sbbl(result, result);

  __ addl(double_value, result);

  if (result.is(rcx)) {
    __ xorl(double_value, result);

  __ shll_cl(double_value);
  __ movl(result, double_value);

  __ xorl(result, double_value);
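// UnaryOpStub: dispatches on the recorded operand type (uninitialized,
// smi, heap number, generic). Each specialized path falls back to a type
// transition that patches the stub when it sees an unexpected operand.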
void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateHeapNumberStub(masm);
      GenerateGenericStub(masm);


void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);


void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
      GenerateSmiStubSub(masm);
      GenerateSmiStubBitNot(masm);


void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label::Distance non_smi_near,
                                     Label::Distance slow_near) {
  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
  __ SmiNeg(rax, rax, &done, Label::kNear);
  __ jmp(slow, slow_near);


void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
                                        Label::Distance non_smi_near) {
  __ JumpIfNotSmi(rax, non_smi, non_smi_near);


void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
      GenerateHeapNumberStubSub(masm);
      GenerateHeapNumberStubBitNot(masm);


void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                  Heap::kHeapNumberMapRootIndex);

  Label slow_allocate_heapnumber, heapnumber_allocated;
  __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ bind(&heapnumber_allocated);


void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                  Heap::kHeapNumberMapRootIndex);
  IntegerConvert(masm, rax, rax);


void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
      GenerateGenericStubSub(masm);
      GenerateGenericStubBitNot(masm);


void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {


void UnaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name = NULL;
  stream->Add("UnaryOpStub_%s_%s_%s",
void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),


void BinaryOpStub::Generate(MacroAssembler* masm) {
  AllowStubCallsScope allow_stub_calls(masm, true);

  switch (operands_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateHeapNumberStub(masm);
      GenerateOddballStub(masm);
      GenerateBothStringStub(masm);
      GenerateStringStub(masm);
      GenerateGeneric(masm);


void BinaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name;
    default: overwrite_name = "UnknownOverwrite"; break;
  stream->Add("BinaryOpStub_%s_%s_%s",


void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {

  Register left = rdx;
  Register right = rax;

  bool generate_inline_heapnumber_results =
      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&

  Label use_fp_on_smis;

  if (op_ != Token::BIT_OR) {
    Comment smi_check_comment(masm, "-- Smi check arguments");
    __ JumpIfNotBothSmi(left, right, &not_smis);

  __ bind(&smi_values);

  Comment perform_smi(masm, "-- Perform smi operation");
      __ SmiAdd(right, right, left, &use_fp_on_smis);
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ SmiMul(right, right, left, &use_fp_on_smis);
      __ SmiDiv(rax, left, right, &use_fp_on_smis);
      __ SmiMod(rax, left, right, &use_fp_on_smis);

    case Token::BIT_OR: {
      __ SmiOrIfSmis(right, right, left, &not_smis);

    case Token::BIT_XOR:
      __ SmiXor(right, right, left);

    case Token::BIT_AND:
      __ SmiAnd(right, right, left);

      __ SmiShiftLeft(left, left, right);
      __ SmiShiftArithmeticRight(left, left, right);
      __ SmiShiftLogicalRight(left, left, right, &use_fp_on_smis);

  if (use_fp_on_smis.is_linked()) {
    __ bind(&use_fp_on_smis);
    if (op_ == Token::DIV || op_ == Token::MOD) {

    if (generate_inline_heapnumber_results) {
      __ AllocateHeapNumber(rcx, rbx, slow);
      Comment perform_float(masm, "-- Perform float operation on smis");
      if (op_ == Token::SHR) {
        __ SmiToInteger32(left, left);
        __ cvtqsi2sd(xmm0, left);
      FloatingPointHelper::LoadSSE2SmiOperands(masm);

  Comment done_comment(masm, "-- Enter non-smi code");
  FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx,
                                     &smi_values, &fail);
  __ jmp(&smi_values);
void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
                                             Label* allocation_failure,
                                             Label* non_numeric_failure) {
      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

      GenerateHeapResultAllocation(masm, allocation_failure);

      __ jmp(allocation_failure);

    case Token::BIT_AND:
    case Token::BIT_XOR:
      Label non_smi_shr_result;
      Register heap_number_map = r9;
      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
        case Token::BIT_OR:  __ orl(rax, rcx); break;
        case Token::BIT_AND: __ andl(rax, rcx); break;
        case Token::BIT_XOR: __ xorl(rax, rcx); break;
        case Token::SAR: __ sarl_cl(rax); break;
        case Token::SHL: __ shll_cl(rax); break;

      if (op_ == Token::SHR) {
        __ bind(&non_smi_shr_result);
        Label allocation_failed;

        if (FLAG_debug_code) {
          __ AbortIfNotRootValue(heap_number_map,
                                 Heap::kHeapNumberMapRootIndex,
                                 "HeapNumberMap register clobbered.");

        __ bind(&allocation_failed);

        __ jmp(allocation_failure);

  if (FLAG_debug_code) {
    __ Abort("Unexpected fall-through in "
             "BinaryStub::GenerateFloatingPointCode.");
void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  Label left_not_string, call_runtime;

  Register left = rdx;
  Register right = rax;

  __ JumpIfSmi(left, &left_not_string, Label::kNear);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime, Label::kNear);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  __ bind(&call_runtime);


void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:
void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
    GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS);
    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateTypeTransition(masm);

  if (call_runtime.is_linked()) {
    __ bind(&call_runtime);
    GenerateCallRuntimeCode(masm);


void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  GenerateStringAddCode(masm);
  GenerateTypeTransition(masm);


void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Register left = rdx;
  Register right = rax;

  __ JumpIfSmi(left, &call_runtime);
  __ JumpIfSmi(right, &call_runtime);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);
void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
    GenerateStringAddCode(masm);

  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
  __ jmp(&done, Label::kNear);

  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(rax, Heap::kNanValueRootIndex);

  GenerateHeapNumberStub(masm);


void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
  GenerateFloatingPointCode(masm, &gc_required, &not_number);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
  GenerateCallRuntimeCode(masm);


void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);

  __ bind(&call_string_add_or_runtime);
    GenerateStringAddCode(masm);

  __ bind(&call_runtime);
  GenerateCallRuntimeCode(masm);
void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                Label* alloc_failure) {
  Label skip_allocation;
      __ JumpIfNotSmi(rdx, &skip_allocation);
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      __ bind(&skip_allocation);

      __ JumpIfNotSmi(rax, &skip_allocation);
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      __ bind(&skip_allocation);


void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
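// TranscendentalCacheStub: hashes the 64-bit representation of the input
// double down to an index into a per-operation cache of (input, output)
// pairs; a hit returns the cached heap number, a miss computes the value
// with x87 instructions and fills the cache entry.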
  Label runtime_call_clear_stack;
  const bool tagged = (argument_type_ == TAGGED);

  Label input_not_smi, loaded;

  __ movq(rax, Operand(rsp, kPointerSize));
  __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);

  __ fld_d(Operand(rsp, 0));
  __ jmp(&loaded, Label::kNear);

  __ bind(&input_not_smi);
  __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);

  __ sar(rdx, Immediate(32));
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ movq(rax, cache_array);
  int cache_array_index =
      type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
  __ movq(rax, Operand(rax, cache_array_index));
  __ j(zero, &runtime_call_clear_stack);

  TranscendentalCache::SubCache::Element test_elem[2];
  char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
  char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
  char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
  char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
  char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
  CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
  CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
  __ ret(kPointerSize);

  __ bind(&skip_cache);
  __ fld_d(Operand(rsp, 0));
  __ fstp_d(Operand(rsp, 0));
  __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);

  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ TailCallExternalReference(
        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
  } else {
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ CallRuntime(RuntimeFunction(), 1);

  return Runtime::kAbort;
  __ cmpl(rdi, Immediate(supported_exponent_limit));
  __ cmpl(rdi, Immediate(0x7ff));

  Label non_nan_result;

  __ subq(rsp, Immediate(kPointerSize));
  __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
  __ movl(Operand(rsp, 0), Immediate(0x00000000));
  __ fld_d(Operand(rsp, 0));
  __ addq(rsp, Immediate(kPointerSize));

  __ bind(&non_nan_result);

  Label no_exceptions;
  __ testl(rax, Immediate(5));
  __ j(zero, &no_exceptions);
  __ bind(&no_exceptions);

  Label partial_remainder_loop;
  __ bind(&partial_remainder_loop);
  __ testl(rax, Immediate(0x400));
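// The FloatingPointHelper loaders below bring the rdx/rax operands into
// SSE2 registers (or untagged integer registers), taking the fast inline
// path for smis and falling back to IntegerConvert or the caller-supplied
// failure label for anything that is not a number.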
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
  Label rax_is_object;
  Label rdx_is_object;

  __ JumpIfNotSmi(rdx, &rdx_is_object);
  __ JumpIfSmi(rax, &rax_is_smi);

  __ bind(&rax_is_object);
  IntegerConvert(masm, rcx, rax);

  __ bind(&rdx_is_object);
  IntegerConvert(masm, rdx, rdx);
  __ JumpIfNotSmi(rax, &rax_is_object);
  __ bind(&rax_is_smi);
void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         Label* conversion_failure,
                                         Register heap_number_map) {
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(rdx, &arg1_is_object);

  __ bind(&check_undefined_arg1);
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);

  __ bind(&arg1_is_object);
  IntegerConvert(masm, r8, rdx);

  __ bind(&load_arg2);
  __ JumpIfNotSmi(rax, &arg2_is_object);

  __ bind(&check_undefined_arg2);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);

  __ bind(&arg2_is_object);
  IntegerConvert(masm, rcx, rax);
void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {


void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;

  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ JumpIfSmi(rax, &load_smi_rax);
  __ bind(&load_nonsmi_rax);

  __ bind(&load_smi_rdx);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;

  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);

  __ bind(&load_smi_rdx);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
                                        Label* on_not_smis) {
  Register heap_number_map = scratch3;
  Register smi_result = scratch1;

  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

  __ JumpIfSmi(first, &first_smi, Label::kNear);

  __ cvttsd2siq(smi_result, xmm0);
  __ cvtlsi2sd(xmm1, smi_result);

  __ Integer32ToSmi(first, smi_result);

  __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
  __ bind(&first_smi);
  if (FLAG_debug_code) {
    __ AbortIfSmi(second);

  __ cvttsd2siq(smi_result, xmm0);
  __ cvtlsi2sd(xmm1, smi_result);

  __ Integer32ToSmi(second, smi_result);
  if (on_success != NULL) {
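// MathPowStub: handles Math.pow with tagged, untagged-double, or integer
// exponents. Exponents of +/-0.5 are special-cased so sqrtsd can be used
// (with explicit checks for an infinite base); integer exponents use
// square-and-multiply; everything else goes through an x87 fast path,
// falling back to the runtime or the C library pow function.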
#ifdef _WIN64
  const Register exponent = rdx;
#else
  const Register exponent = rdi;
#endif
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  __ movq(scratch, Immediate(1));
  __ cvtlsi2sd(double_result, scratch);

  Label base_is_smi, unpack_exponent;

  __ movq(base, Operand(rsp, 2 * kPointerSize));
  __ movq(exponent, Operand(rsp, 1 * kPointerSize));
  __ JumpIfSmi(base, &base_is_smi, Label::kNear);
                 Heap::kHeapNumberMapRootIndex);

  __ jmp(&unpack_exponent, Label::kNear);

  __ bind(&base_is_smi);
  __ SmiToInteger32(base, base);
  __ cvtlsi2sd(double_base, base);
  __ bind(&unpack_exponent);

  __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
  __ SmiToInteger32(exponent, exponent);
  __ jmp(&int_exponent);

  __ bind(&exponent_not_smi);
                 Heap::kHeapNumberMapRootIndex);
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);

  if (exponent_type_ != INTEGER) {
    __ cvttsd2si(exponent, double_exponent);
    __ cmpl(exponent, Immediate(0x80000000u));
    __ cvtlsi2sd(double_scratch, exponent);
    __ ucomisd(double_exponent, double_scratch);

    Label continue_sqrt, continue_rsqrt, not_plus_half;

    __ movq(double_scratch, scratch);
    __ ucomisd(double_scratch, double_exponent);

    __ movq(double_scratch, scratch);
    __ ucomisd(double_scratch, double_base);
    __ j(carry, &continue_sqrt, Label::kNear);

    __ xorps(double_result, double_result);
    __ subsd(double_result, double_scratch);

    __ bind(&continue_sqrt);
    __ xorps(double_scratch, double_scratch);
    __ addsd(double_scratch, double_base);
    __ sqrtsd(double_result, double_scratch);

    __ bind(&not_plus_half);
    __ subsd(double_scratch, double_result);
    __ ucomisd(double_scratch, double_exponent);

    __ movq(double_scratch, scratch);
    __ ucomisd(double_scratch, double_base);
    __ j(carry, &continue_rsqrt, Label::kNear);

    __ xorps(double_result, double_result);

    __ bind(&continue_rsqrt);
    __ xorps(double_exponent, double_exponent);
    __ addsd(double_exponent, double_base);
    __ sqrtsd(double_exponent, double_exponent);
    __ divsd(double_result, double_exponent);

    Label fast_power_failed;
    __ bind(&fast_power);
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));

    __ testb(rax, Immediate(0x5F));
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));

    __ bind(&fast_power_failed);
    __ jmp(&call_runtime);

  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ movq(scratch, exponent);
  __ movsd(double_scratch, double_base);
  __ movsd(double_scratch2, double_result);

  Label no_neg, while_true, no_multiply;
  __ testl(scratch, scratch);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_result, double_scratch);
  __ bind(&no_multiply);
  __ mulsd(double_scratch, double_scratch);

  __ testl(exponent, exponent);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);

  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  __ cvtlsi2sd(double_exponent, exponent);

  Counters* counters = masm->isolate()->counters();

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

  __ AllocateHeapNumber(rax, rcx, &call_runtime);
  __ IncrementCounter(counters->math_pow(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ movsd(xmm0, double_base);

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(2);
      ExternalReference::power_double_double_function(masm->isolate()), 2);
  __ movsd(double_result, xmm0);

  __ IncrementCounter(counters->math_pow(), 1);
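// ArgumentsAccessStub: reads an element of the arguments object directly
// off the stack, handling both the normal frame layout and the arguments
// adaptor frame present when actual and formal argument counts differ;
// out-of-bounds indices go to the runtime.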
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  __ JumpIfNotSmi(rdx, &slow);

  __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));

  __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));

  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  Factory* factory = masm->isolate()->factory();

  __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));

  Label adaptor_frame, try_allocate;

  __ jmp(&try_allocate, Label::kNear);

  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  __ bind(&try_allocate);

  const int kParameterMapHeaderSize =
  Label no_parameter_map;
  __ j(zero, &no_parameter_map, Label::kNear);
  __ bind(&no_parameter_map);

  Label has_mapped_parameters, copy;
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);

  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  Label skip_parameter_map;
  __ j(zero, &skip_parameter_map);

  __ Integer64PlusConstantToSmi(r9, rbx, 2);

  Label parameters_loop, parameters_test;

  __ addq(r8, Operand(rsp, 1 * kPointerSize));
  __ Move(r11, factory->the_hole_value());
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
                          kParameterMapHeaderSize),
  __ bind(&parameters_test);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);
          factory->fixed_array_map());

  Label arguments_loop, arguments_test;
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subq(rdx, Immediate(kPointerSize));
  __ addq(r8, Immediate(1));

  __ bind(&arguments_test);
  __ j(less, &arguments_loop, Label::kNear);

  __ ret(3 * kPointerSize);

  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Label adaptor_frame, try_allocate, runtime;

  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  __ bind(&adaptor_frame);
  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  Label add_arguments_object;
  __ bind(&try_allocate);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ bind(&add_arguments_object);

  __ movq(rdi, Operand(rdi, offset));

  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));

  __ movq(rbx, Operand(rdx, -1 * kPointerSize));
  __ addq(rdi, Immediate(kPointerSize));
  __ subq(rdx, Immediate(kPointerSize));

  __ ret(3 * kPointerSize);

  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
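// RegExpExecStub: checks the regexp, subject, index, and last-match-info
// arguments, unwraps cons/sliced subjects down to a sequential string,
// selects the ASCII or two-byte compiled code object, and calls the native
// RegExp code through an API exit frame; on success the capture registers
// are copied into the last-match-info array.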
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;

  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);

  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ JumpIfSmi(rax, &runtime);

  if (FLAG_debug_code) {
             "Unexpected type for RegExp data, FixedArray expected");
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");

  __ SmiToInteger32(rdx,

  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ JumpIfSmi(rdi, &runtime);

  __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
  __ JumpIfNotSmi(rbx, &runtime);

  __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
  __ JumpIfSmi(rdi, &runtime);
                 Heap::kFixedArrayMapRootIndex);

  Label seq_ascii_string, seq_two_byte_string, check_code;
  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ j(zero, &seq_two_byte_string, Label::kNear);
  __ j(zero, &seq_ascii_string, Label::kNear);

  Label cons_string, external_string, check_encoding;
  __ j(less, &cons_string, Label::kNear);
  __ j(equal, &external_string);

  __ jmp(&check_encoding, Label::kNear);

  __ bind(&cons_string);
                 Heap::kEmptyStringRootIndex);

  __ bind(&check_encoding);
  __ j(zero, &seq_two_byte_string, Label::kNear);

  __ bind(&seq_ascii_string);
  __ jmp(&check_code, Label::kNear);

  __ bind(&seq_two_byte_string);

  __ bind(&check_code);
  __ JumpIfSmi(r11, &runtime);

  __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
      ExternalReference::address_of_static_offsets_vector(isolate));
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);

#ifdef _WIN64
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  __ j(zero, &setup_two_byte, Label::kNear);
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ bind(&setup_rest);

  __ LeaveApiExitFrame();

  __ cmpl(rax, Immediate(1));
  __ j(equal, &success, Label::kNear);

  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(4 * kPointerSize);

  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ SmiToInteger32(rax,

  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));

  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ RecordWriteField(rbx,
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ RecordWriteField(rbx,

      ExternalReference::address_of_static_offsets_vector(isolate));

  Label next_capture, done;
  __ bind(&next_capture);
  __ subq(rdx, Immediate(1));
  __ jmp(&next_capture);

  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  __ bind(&exception);
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate);
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movq(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ movq(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ j(equal, &termination_exception, Label::kNear);

  __ bind(&termination_exception);
  __ ThrowUncatchable(rax);

  __ bind(&external_string);
  if (FLAG_debug_code) {
    __ Assert(zero, "external string expected, but not found");

  __ jmp(&seq_two_byte_string);

  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;

  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ JumpIfNotSmi(r8, &slowcase);
  __ cmpl(rbx, Immediate(kMaxInlineLength));

  __ movq(r8, Operand(rsp, kPointerSize * 1));
  __ movq(r8, Operand(rsp, kPointerSize * 2));
  __ movq(r8, Operand(rsp, kPointerSize * 3));

  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ subl(rbx, Immediate(1));

  __ ret(3 * kPointerSize);

  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
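// NumberToStringStub: looks the number up in the number-string cache, a
// fixed array of (number, string) pairs indexed by a small hash of the
// value masked to the cache size; a miss falls through to
// Runtime::kNumberToStringSkipCache.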
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  __ shrl(mask, Immediate(1));
  __ subq(mask, Immediate(1));

  Label load_result_from_cache;
  Factory* factory = masm->isolate()->factory();
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);
                factory->heap_number_map(),

    GenerateConvertHashCodeToIndex(masm, scratch, mask);

    Register index = scratch;
    Register probe = mask;
    __ JumpIfSmi(probe, not_found);
    __ jmp(&load_result_from_cache);

  __ SmiToInteger32(scratch, object);
  GenerateConvertHashCodeToIndex(masm, scratch, mask);

  Register index = scratch;

  __ bind(&load_result_from_cache);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->number_to_string_native(), 1);


void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
  __ and_(hash, mask);


void NumberToStringStub::Generate(MacroAssembler* masm) {
  __ movq(rbx, Operand(rsp, kPointerSize));
  __ ret(1 * kPointerSize);

  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
static int NegativeComparisonResult(Condition cc) {

  Label check_unequal_objects, done;
  Factory* factory = masm->isolate()->factory();

  if (include_smi_compare_) {
    Label non_smi, smi_done;
    __ JumpIfNotBothSmi(rax, rdx, &non_smi);
  } else if (FLAG_debug_code) {
    __ JumpIfNotSmi(rdx, &ok);
    __ JumpIfNotSmi(rax, &ok);
    __ Abort("CompareStub: smi operands");

  Label not_identical;

  Label check_for_nan;
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Set(rax, NegativeComparisonResult(cc_));

  __ bind(&check_for_nan);

  if (never_nan_nan_ && (cc_ == equal)) {
           factory->heap_number_map());
    __ j(equal, &heap_number, Label::kNear);

    __ bind(&heap_number);

  __ bind(&not_identical);

         factory->heap_number_map());

    Label first_non_object;
    __ j(below, &first_non_object, Label::kNear);

    Label return_not_equal;
    __ bind(&return_not_equal);

    __ bind(&first_non_object);
    __ j(equal, &return_not_equal);
    __ j(equal, &return_not_equal);

  if (include_number_compare_) {
    Label non_number_comparison;
    FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);

    __ bind(&unordered);

    __ bind(&non_number_comparison);

  Label check_for_strings;

  __ bind(&check_for_strings);
  __ JumpIfNotBothSequentialAsciiStrings(

  __ Abort("Unexpected fall-through from string comparison");

  __ bind(&check_unequal_objects);
  if (cc_ == equal && !strict_) {
    Label not_both_objects, return_unequal;
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ bind(&return_unequal);
    __ bind(&not_both_objects);

  builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;


void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
  __ JumpIfSmi(object, label);


  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);


  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
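// GenerateRecordCallTarget caches the called function in a cell: an
// uninitialized cell is set to the function, a cell holding a different
// function degrades to the megamorphic sentinel, and a matching cell is
// left alone. CallFunctionStub uses this for call ICs and also replaces an
// implicit receiver (the hole) before invoking the function.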
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  Label initialize, done;

  __ j(equal, &done, Label::kNear);
  __ j(equal, &done, Label::kNear);
  __ j(equal, &initialize, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ bind(&initialize);

  Isolate* isolate = masm->isolate();
  Label slow, non_function;

  if (ReceiverMightBeImplicit()) {
    __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
    __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
    __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);

  __ JumpIfSmi(rdi, &non_function);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
    __ j(equal, &call_as_function);
    __ InvokeFunction(rdi,

    __ bind(&call_as_function);
  __ InvokeFunction(rdi,

  if (RecordCallTarget()) {

  __ Set(rax, argc_ + 1);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(adaptor, RelocInfo::CODE_TARGET);

  __ bind(&non_function);
  __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor =
      Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);

  Label slow, non_function_call;
  __ JumpIfSmi(rdi, &non_function_call);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);

  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
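// CEntryStub::GenerateCore calls the C++ runtime function and inspects the
// returned value: a failure tag selects between retry, out-of-memory,
// termination, and ordinary exceptions, each rethrown through the
// corresponding caller-supplied label.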
bool CEntryStub::NeedsImmovableCode() {
  return result_size_ == 1;


void CodeStub::GenerateStubsAheadOfTime() {


void CodeStub::GenerateFPStubs() {
  stub.GetCode()->set_is_pregenerated(true);
  save_doubles.GetCode()->set_is_pregenerated(true);


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool always_allocate_scope) {
  if (FLAG_debug_code) {
    __ CheckStackAlignment();

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
    __ incl(scope_depth_operand);

  if (result_size_ < 2) {
    __ LoadAddress(rdx, ExternalReference::isolate_address());

  __ LoadAddress(r8, ExternalReference::isolate_address());

  __ movq(rdx, ExternalReference::isolate_address());

  if (always_allocate_scope) {
    Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
    __ decl(scope_depth_operand);

  Label failure_returned;

  if (result_size_ > 1) {
    __ movq(rax, Operand(rsp, 6 * kPointerSize));
    __ movq(rdx, Operand(rsp, 7 * kPointerSize));

  __ j(zero, &failure_returned);

  __ LeaveExitFrame(save_doubles_);

  __ bind(&failure_returned);
  __ j(zero, &retry, Label::kNear);
  __ j(equal, throw_out_of_memory_exception);

  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, masm->isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address);
  __ movq(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ movq(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  __ j(equal, throw_termination_exception);

  __ jmp(throw_normal_exception);

#ifdef _WIN64
  int arg_stack_space = (result_size_ < 2 ? 2 : 4);
#else
  int arg_stack_space = 0;
#endif
  __ EnterExitFrame(arg_stack_space, save_doubles_);

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ bind(&throw_out_of_memory_exception);
  Isolate* isolate = masm->isolate();
  ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
  __ Set(rax, static_cast<int64_t>(false));
  __ Store(external_caught, rax);

  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ Store(pending_exception, rax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(rax);

  __ bind(&throw_normal_exception);
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);

  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;

  __ InitializeSmiConstantRegister();
  __ InitializeRootRegister();

  Isolate* isolate = masm->isolate();

  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
  Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
  __ push(c_entry_fp_operand);

  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ Store(js_entry_sp, rax);
  __ bind(&not_outermost_js);

  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();

  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ Store(pending_exception, rax);

  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
  __ Store(pending_exception, rax);

  __ push(Immediate(0));

  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
  ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);

  __ bind(&not_outermost_js_2);

  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ pop(c_entry_fp_operand);

  __ addq(rsp, Immediate(2 * kPointerSize));
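// InstanceofStub: walks the prototype chain of the object, comparing each
// prototype against the function's prototype. Results are memoized in the
// instanceof cache roots unless the call site uses an inlined check, in
// which case the answer is patched directly into the caller's code (the
// kWordBefore* constants validate the expected code sequence in debug mode).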
  static const int kOffsetToMapCheckValue = 2;
  static const int kOffsetToResultValue = 18;

  static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;

  static const unsigned int kWordBeforeResultValue = 0x458B4909;

  int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;

  __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
  __ JumpIfSmi(rax, &slow);

  __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));

  if (!HasCallSiteInlineCheck()) {
    __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
    __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
    __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
    __ ret(2 * kPointerSize);

  __ TryGetFunctionPrototype(rdx, rbx, &slow, true);

  __ JumpIfSmi(rbx, &slow);

  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
    __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);

    if (FLAG_debug_code) {
      __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
      __ Assert(equal, "InstanceofStub unexpected call site cache (check).");

  Label loop, is_instance, is_not_instance;
  __ j(equal, &is_instance, Label::kNear);
  __ j(equal, &is_not_instance, Label::kNear);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);

    int true_offset = 0x100 +
    ASSERT(true_offset >= 0 && true_offset < 0x100);
    __ movl(rax, Immediate(true_offset));

    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");

  __ ret(2 * kPointerSize + extra_stack_space);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {

    int false_offset = 0x100 +
    ASSERT(false_offset >= 0 && false_offset < 0x100);
    __ movl(rax, Immediate(false_offset));

    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");

  __ ret(2 * kPointerSize + extra_stack_space);

  if (HasCallSiteInlineCheck()) {
int CompareStub::MinorKey() {
  ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
  return ConditionField::encode(static_cast<unsigned>(cc_))
         | RegisterField::encode(false)
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_)
         | IncludeSmiCompareField::encode(include_smi_compare_);


void CompareStub::PrintName(StringStream* stream) {
  const char* cc_name;
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case equal: cc_name = "EQ"; break;
    default: cc_name = "UnknownCondition"; break;
  stream->Add("CompareStub_%s", cc_name);
  if (strict_ && is_equality) stream->Add("_STRICT");
  if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
  if (!include_number_compare_) stream->Add("_NO_NUMBER");
  if (!include_smi_compare_) stream->Add("_NO_SMI");
  Label got_char_code;
  Label sliced_string;

  __ JumpIfSmi(object_, receiver_not_string_);

  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  __ SmiToInteger32(index_, index_);
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  Factory* factory = masm->isolate()->factory();

  __ bind(&index_not_smi_);
              factory->heap_number_map(),
  call_helper.BeforeCall(masm);
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  if (!index_.is(rax)) {
    __ movq(index_, rax);
  call_helper.AfterCall(masm);

  __ JumpIfNotSmi(index_, index_out_of_range_);
  __ jmp(&got_smi_index_);

  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Integer32ToSmi(index_, index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  call_helper.AfterCall(masm);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
  __ JumpIfNotSmi(code_, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  call_helper.AfterCall(masm);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;

  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  __ JumpIfSmi(rax, &call_runtime);
  __ JumpIfSmi(rdx, &call_runtime);

    GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
    builtin_id = Builtins::STRING_ADD_RIGHT;

    ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
    GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
    builtin_id = Builtins::STRING_ADD_LEFT;

  Label second_not_zero_length, both_not_zero_length;
  __ j(not_zero, &second_not_zero_length, Label::kNear);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ j(not_zero, &both_not_zero_length, Label::kNear);

  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);

  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,

  Label make_two_character_string, make_flat_ascii_string;
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&longer_than_two);
  __ j(below, &string_add_flat_result);

  Label non_ascii, allocated, ascii_data;

  __ bind(&ascii_data);
  __ bind(&allocated);

  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);

  Label first_prepared, second_prepared;
  Label first_is_sequential, second_is_sequential;
  __ bind(&string_add_flat_result);

  __ j(zero, &first_is_sequential, Label::kNear);
  __ jmp(&first_prepared, Label::kNear);
  __ bind(&first_is_sequential);
  __ bind(&first_prepared);

  __ j(zero, &second_is_sequential, Label::kNear);
  __ jmp(&second_prepared, Label::kNear);
  __ bind(&second_is_sequential);
  __ bind(&second_prepared);

  Label non_ascii_string_add_flat_result;
  __ j(zero, &non_ascii_string_add_flat_result);

  __ bind(&make_flat_ascii_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&non_ascii_string_add_flat_result);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);


void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);

  __ bind(&not_string);

  __ movq(arg, scratch1);
  __ movq(Operand(rsp, stack_offset), arg);

  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);

  __ movq(Operand(rsp, stack_offset), arg);
  __ addq(src, Immediate(2));
  __ addq(dest, Immediate(2));

  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  __ addl(count, count);

  __ testl(count, Immediate(~7));
  __ j(zero, &last_bytes, Label::kNear);

  __ shr(count, Immediate(3));
  __ and_(count, Immediate(7));

  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);
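// GenerateTwoCharacterSymbolTableProbe: strings made of two digit
// characters are treated as array indices and skipped; everything else is
// looked up in the symbol table with up to kProbes hash probes, and a hit
// returns the existing symbol in rax instead of allocating a new string.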
  Register scratch = scratch3;

  Label not_array_index;
  __ leal(scratch, Operand(c1, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ leal(scratch, Operand(c2, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));

  __ bind(&not_array_index);

  Register hash = scratch1;

  Register chars = c1;

  Register symbol_table = c2;

  Register mask = scratch2;
  __ SmiToInteger32(mask,

  Register map = scratch4;

  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  Register candidate = scratch;
  for (int i = 0; i < kProbes; i++) {
    __ movl(scratch, hash);
      __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
    __ andl(scratch, mask);

    __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);

    if (FLAG_debug_code) {
      __ Assert(equal, "oddball in symbol table is not undefined or the hole");
      __ jmp(&next_probe[i]);

    __ bind(&is_string);

    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe[i]);

    __ andl(temp, Immediate(0x0000ffff));
    __ cmpl(chars, temp);
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe[i]);

  Register result = candidate;
  __ bind(&found_in_symbol_table);
  if (!result.is(rax)) {
    __ movq(rax, result);
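// The string hash below is the Jenkins one-at-a-time hash: for each
// character, hash += c; hash += hash << 10; hash ^= hash >> 6; and the
// final mix is hash += hash << 3; hash ^= hash >> 11; hash += hash << 15,
// seeded from the heap's hash seed root.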
  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
  __ SmiToInteger32(scratch, scratch);
  __ addl(scratch, character);
  __ movl(hash, scratch);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);

  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);

  __ addl(hash, character);

  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);

  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);

  __ leal(hash, Operand(hash, hash, times_8, 0));

  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(11));
  __ xorl(hash, scratch);

  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  Label hash_not_zero;
  __ bind(&hash_not_zero);
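// SubStringStub: returns the original string when the range covers the
// whole string, unwraps cons and sliced strings to the underlying flat
// string, and (with FLAG_string_slices) materializes long substrings as
// sliced strings instead of copying; short results fall through to a
// character-copy loop, everything else to Runtime::kSubString.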
5111 void SubStringStub::Generate(MacroAssembler* masm) {
5123 const int kArgumentsSize = (kStringOffset +
kPointerSize) - kToOffset;
5126 __ movq(
rax, Operand(
rsp, kStringOffset));
5136 __ movq(
rcx, Operand(
rsp, kToOffset));
5137 __ movq(
rdx, Operand(
rsp, kFromOffset));
5138 __ JumpUnlessBothNonNegativeSmi(
rcx,
rdx, &runtime);
5142 Label not_original_string;
5144 __ j(
below, ¬_original_string, Label::kNear);
5148 Counters* counters = masm->isolate()->counters();
5149 __ IncrementCounter(counters->sub_string_native(), 1);
5150 __ ret(kArgumentsSize);
5151 __ bind(¬_original_string);
5160 Label underlying_unpacked, sliced_string, seq_or_external_string;
5165 __ j(
zero, &seq_or_external_string, Label::kNear);
5168 __ j(
not_zero, &sliced_string, Label::kNear);
5172 Heap::kEmptyStringRootIndex);
5178 __ jmp(&underlying_unpacked, Label::kNear);
5180 __ bind(&sliced_string);
5187 __ jmp(&underlying_unpacked, Label::kNear);
5189 __ bind(&seq_or_external_string);
5193 __ bind(&underlying_unpacked);
5195 if (FLAG_string_slices) {
5205 __ j(
less, ©_routine);
5211 Label two_byte_slice, set_slice_header;
5215 __ j(
zero, &two_byte_slice, Label::kNear);
5216 __ AllocateAsciiSlicedString(
rax,
rbx,
r14, &runtime);
5217 __ jmp(&set_slice_header, Label::kNear);
5218 __ bind(&two_byte_slice);
5219 __ AllocateTwoByteSlicedString(
rax,
rbx,
r14, &runtime);
5220 __ bind(&set_slice_header);
5227 __ IncrementCounter(counters->sub_string_native(), 1);
5228 __ ret(kArgumentsSize);
5230 __ bind(©_routine);
5239 Label two_byte_sequential, sequential_string;
5243 __ j(zero, &sequential_string);
5255 __ bind(&sequential_string);
5258 __ j(zero, &two_byte_sequential);
5267 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
5268 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, SeqAsciiString::kHeaderSize - kHeapObjectTag));
5281 __ IncrementCounter(counters->sub_string_native(), 1);
5282 __ ret(kArgumentsSize);
5284 __ bind(&two_byte_sequential);
5292 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
5293 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5306 __ IncrementCounter(counters->sub_string_native(), 1);
5307 __ ret(kArgumentsSize);
5311 __ TailCallRuntime(Runtime::kSubString, 3, 1);
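// Annotation: how the stub above picks among its three exits, sketched in
// C++. kMinSliceLength stands in for SlicedString::kMinLength (13 in this
// era of V8, but treat the value as an assumption), and the sliced path
// additionally requires FLAG_string_slices to be on.
#include <cstddef>
#include <string>

static const std::size_t kMinSliceLength = 13;  // assumed SlicedString::kMinLength

enum SubStringKind { kReturnOriginal, kMakeSlice, kCopyChars };

SubStringKind ClassifySubString(const std::string& s,
                                std::size_t from, std::size_t to) {
  std::size_t length = to - from;
  if (from == 0 && length == s.size()) return kReturnOriginal;  // whole string
  if (length >= kMinSliceLength) return kMakeSlice;  // share the parent's chars
  return kCopyChars;  // short result: copying beats a SlicedString header
}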
5319 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, Register left, Register right, Register scratch1, Register scratch2) {
5320 Register length = scratch1;
5323 Label check_zero_length;
5326 __ j(equal, &check_zero_length, Label::kNear);
5331 Label compare_chars;
5332 __ bind(&check_zero_length);
5335 __ j(not_zero, &compare_chars, Label::kNear);
5340 __ bind(&compare_chars);
5341 Label strings_not_equal;
5342 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
5343 &strings_not_equal, Label::kNear);
5350 __ bind(&strings_not_equal);
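// Annotation: the equality routine above, restated in C++: equal lengths
// first, the zero-length shortcut, then a byte-wise compare. What the stub
// returns as Smi::FromInt(EQUAL) / Smi::FromInt(NOT_EQUAL) becomes a bool.
#include <cstring>
#include <string>

bool FlatAsciiStringEquals(const std::string& left, const std::string& right) {
  if (left.size() != right.size()) return false;  // lengths differ: not equal
  if (left.empty()) return true;                  // both empty: equal
  return std::memcmp(left.data(), right.data(), left.size()) == 0;
}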
5362 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4) {
5369 __ movq(scratch4, scratch1);
5374 const Register length_difference = scratch4;
Label left_shorter;
5376 __ j(less, &left_shorter, Label::kNear);
5380 __ SmiSub(scratch1, scratch1, length_difference);
5381 __ bind(&left_shorter);
5383 const Register min_length = scratch1;
5385 Label compare_lengths;
5387 __ SmiTest(min_length);
5388 __ j(zero, &compare_lengths, Label::kNear);
5391 Label result_not_equal;
5392 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
5393 &result_not_equal, Label::kNear);
5397 __ bind(&compare_lengths);
5398 __ SmiTest(length_difference);
5399 __ j(not_zero, &result_not_equal, Label::kNear);
5405 Label result_greater;
5406 __ bind(&result_not_equal);
5408 __ j(greater, &result_greater, Label::kNear);
5415 __ bind(&result_greater);
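// Annotation: the ordering logic above in C++: compare the first
// min(len_l, len_r) characters; if they all match, the length difference
// decides. Returns -1/0/1 where the stub returns
// Smi::FromInt(LESS/EQUAL/GREATER).
#include <algorithm>
#include <cstddef>
#include <string>

int CompareFlatAsciiStrings(const std::string& l, const std::string& r) {
  std::size_t min_length = std::min(l.size(), r.size());
  for (std::size_t i = 0; i < min_length; i++) {
    if (l[i] != r[i]) return l[i] < r[i] ? -1 : 1;  // first mismatch decides
  }
  if (l.size() == r.size()) return 0;
  return l.size() < r.size() ? -1 : 1;  // the shorter string sorts first
}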
5421 void StringCompareStub::GenerateAsciiCharsCompareLoop(MacroAssembler* masm, Register left, Register right, Register length, Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
5432 __ SmiToInteger32(length, length);
5438 Register index = length;
5443 __ movb(scratch, Operand(left, index, times_1, 0));
5444 __ cmpb(scratch, Operand(right, index, times_1, 0));
5451 void StringCompareStub::Generate(MacroAssembler* masm) {
5459 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
5460 __ movq(rax, Operand(rsp, 1 * kPointerSize));
5467 Counters* counters = masm->isolate()->counters();
5468 __ IncrementCounter(counters->string_compare_native(), 1);
5469 __ ret(2 * kPointerSize);
5474 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
5477 __ IncrementCounter(counters->string_compare_native(), 1);
5480 __ addq(rsp, Immediate(2 * kPointerSize));
5487 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
5491 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
5494 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
5496 if (GetCondition() == equal) {
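// Annotation: the body of this branch is elided in this dump. What the smi
// IC computes, sketched with untagged 64-bit values: for an equality stub
// the subtraction alone is the answer (zero iff equal); an ordered stub
// materializes -1/0/1 instead. Smi tagging is elided here.
#include <cstdint>

int64_t CompareSmis(int64_t left, int64_t right, bool equality_only) {
  if (equality_only) return left - right;  // caller only tests for zero
  if (left < right) return -1;
  return left > right ? 1 : 0;
}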
5515 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
5519 Label generic_stub, unordered, maybe_undefined1, maybe_undefined2;
Condition either_smi = masm->CheckEitherSmi(rax, rdx);
5522 __ j(either_smi, &generic_stub, Label::kNear);
5541 __ movl(rax, Immediate(0));
5542 __ movl(rcx, Immediate(0));
5547 __ bind(&unordered);
5549 __ bind(&generic_stub);
5550 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
5552 __ bind(&maybe_undefined1);
5554 __ Cmp(rax, masm->isolate()->factory()->undefined_value());
5561 __ bind(&maybe_undefined2);
5563 __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
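// Annotation: the double-compare convention above, in C++: ucomisd reports
// NaN operands as "unordered" (via the parity flag), and the stub bails to
// the generic path rather than deriving an answer from the flags.
#include <cmath>

int CompareHeapNumbers(double left, double right, bool* unordered) {
  *unordered = std::isnan(left) || std::isnan(right);
  if (*unordered) return 0;  // meaningless; take the generic stub instead
  if (left < right) return -1;
  return left > right ? 1 : 0;
}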
5572 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
5577 Register left = rdx;
5578 Register right = rax;
5579 Register tmp1 = rcx;
5580 Register tmp2 = rbx;
5584 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
5585 __ j(cond, &miss, Label::kNear);
5593 __ and_(tmp1, tmp2);
5595 __ j(zero, &miss, Label::kNear);
5599 __ cmpq(left, right);
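// Annotation: why one cmpq settles symbol equality: V8 interns symbols, so
// two equal symbol strings are the same heap object. A sketch with an
// opaque Object type standing in for V8's:
struct Object;

bool SymbolsEqual(Object* left, Object* right) {
  return left == right;  // pointer identity == string equality for symbols
}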
5615 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
5622 Register left = rdx;
5623 Register right = rax;
5624 Register tmp1 = rcx;
5625 Register tmp2 = rbx;
5626 Register tmp3 = rdi;
5629 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
5638 __ movq(tmp3, tmp1);
5646 __ cmpq(left, right);
5661 __ and_(tmp1, tmp2);
5663 __ j(zero, &do_compare, Label::kNear);
5668 __ bind(&do_compare);
5673 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
5678 StringCompareStub::GenerateFlatAsciiStringEquals(masm, left, right, tmp1, tmp2);
5691 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
5693 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
5701 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
Condition either_smi = masm->CheckEitherSmi(rdx, rax);
5705 __ j(either_smi, &miss, Label::kNear);
5721 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
Condition either_smi = masm->CheckEitherSmi(rdx, rax);
5724 __ j(either_smi, &miss, Label::kNear);
5728 __ Cmp(rcx, known_map_);
5730 __ Cmp(rbx, known_map_);
5741 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
5744 ExternalReference miss =
5745 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
5753 __ CallExternalReference(miss, 3);
5769 Register properties,
5770 Handle<String> name,
5777 for (int i = 0; i < kInlinedProbes; i++) {
5780 Register index = r0;
5782 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
__ decl(index);
5785 __ and_(index, Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
5789 __ lea(index, Operand(index, index, times_2, 0));
5791 Register entity_name = r0;
5794 __ movq(entity_name, Operand(properties, index, times_pointer_size, kElementsStartOffset - kHeapObjectTag));
5798 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
5802 __ Cmp(entity_name, Handle<String>(name));
5807 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
5808 __ j(equal, &the_hole, Label::kNear);
5823 __ Push(Handle<Object>(name));
5824 __ push(Immediate(name->Hash()));
5843 ASSERT(!elements.is(r0));
5844 ASSERT(!elements.is(r1));
5849 if (FLAG_debug_code) __ AbortIfNotString(name);
5854 for (int i = 0; i < kInlinedProbes; i++) {
5859 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
5865 __ lea(r1, Operand(r1, r1, times_2, 0));
5904 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
5906 Register scratch = result_;
5908 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
5917 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
5919 __ movq(scratch, Operand(rsp, 2 * kPointerSize));
5921 __ addl(scratch, Immediate(StringDictionary::GetProbeOffset(i)));
5923 __ and_(scratch, Operand(rsp, 0));
5927 __ lea(index_, Operand(scratch, scratch, times_2, 0));
5930 __ movq(scratch, Operand(dictionary_, index_, times_pointer_size, kElementsStartOffset - kHeapObjectTag));
5935 __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
5936 __ j(equal, &not_in_dictionary);
5939 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));
5951 __ j(zero, &maybe_in_dictionary);
5955 __ bind(&maybe_in_dictionary);
5960 __ movq(scratch, Immediate(0));
5962 __ ret(2 * kPointerSize);
5965 __ bind(&in_dictionary);
5966 __ movq(scratch, Immediate(1));
5968 __ ret(2 * kPointerSize);
5970 __ bind(¬_in_dictionary);
5971 __ movq(scratch, Immediate(0));
5973 __ ret(2 * kPointerSize);
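// Annotation: the whole lookup stub as a C++ predicate, under the same
// assumptions as earlier (quadratic probing with (i*i + i)/2, power-of-two
// capacity, empty string standing in for undefined). kTotalProbesSketch
// mirrors the stub's bounded loop; past it the stub reports the
// conservative maybe_in_dictionary answer, treated as a miss here.
#include <cstdint>
#include <string>
#include <vector>

static const int kTotalProbesSketch = 20;  // assumed kTotalProbes

int DictionaryLookup(const std::vector<std::string>& entries,
                     uint32_t hash, const std::string& key) {
  uint32_t mask = static_cast<uint32_t>(entries.size()) - 1;
  for (int i = 0; i < kTotalProbesSketch; i++) {
    uint32_t index = (hash + static_cast<uint32_t>(i * i + i) / 2) & mask;
    if (entries[index].empty()) return -1;  // undefined: not in dictionary
    if (entries[index] == key) return static_cast<int>(index);
  }
  return -1;  // probe budget exhausted: conservatively report a miss
}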
5977 struct AheadOfTimeWriteBarrierStubList {
5978 Register object, value, address;
5983 #define REG(Name) { kRegister_ ## Name ## _Code }
5985 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
6024 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
6025 !entry->object.is(no_reg); entry++) {
6027 if (object_.is(entry->object) &&
6028 value_.is(entry->value) &&
6029 address_.is(entry->address) &&
6030 remembered_set_action_ == entry->action &&
6041 stub1.GetCode()->set_is_pregenerated(true);
6043 stub2.GetCode()->set_is_pregenerated(true);
6048 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
6049 !entry->object.is(no_reg); entry++) {
6056 stub.GetCode()->set_is_pregenerated(true);
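// Annotation: the pregeneration walk above, sketched with stand-in types
// (RegisterSketch, no_reg_sketch, and the table contents are illustrative,
// not V8's declarations): the table is sentinel-terminated by a no_reg
// entry, and each register triple gets its RecordWriteStub compiled eagerly
// so GC-critical code never has to compile a stub lazily.
#include <cstdio>

struct RegisterSketch {
  int code;
  bool is(const RegisterSketch& other) const { return code == other.code; }
};
static const RegisterSketch no_reg_sketch = { -1 };

struct AotEntry { RegisterSketch object, value, address; };

static const AotEntry kTableSketch[] = {
  { {1}, {0}, {2} },                               // one illustrative triple
  { no_reg_sketch, no_reg_sketch, no_reg_sketch }  // sentinel ends the walk
};

void PregenerateRecordWriteStubs() {
  for (const AotEntry* entry = kTableSketch; !entry->object.is(no_reg_sketch);
       entry++) {
    // Real code constructs a RecordWriteStub for this triple and marks its
    // code object pregenerated.
    std::printf("pregenerate stub for registers %d/%d/%d\n",
                entry->object.code, entry->value.code, entry->address.code);
  }
}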
6065 void RecordWriteStub::Generate(MacroAssembler* masm) {
6066 Label skip_to_incremental_noncompacting;
6067 Label skip_to_incremental_compacting;
6074 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
6075 __ jmp(&skip_to_incremental_compacting, Label::kFar);
6078 __ RememberedSetHelper(object_,
6087 __ bind(&skip_to_incremental_noncompacting);
6090 __ bind(&skip_to_incremental_compacting);
6100 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
6104 Label dont_need_remembered_set;
6106 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
6107 __ JumpIfNotInNewSpace(regs_.scratch0(),
6109 &dont_need_remembered_set);
6111 __ CheckPageFlag(regs_.object(),
6115 &dont_need_remembered_set);
6119 CheckNeedsToInformIncrementalMarker(
6120 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
6121 InformIncrementalMarker(masm, mode);
6122 regs_.Restore(masm);
6123 __ RememberedSetHelper(object_,
6129 __ bind(&dont_need_remembered_set);
6132 CheckNeedsToInformIncrementalMarker(
6133 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
6134 InformIncrementalMarker(masm, mode);
6135 regs_.Restore(masm);
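// Annotation: the decision GenerateIncremental encodes, as plain C++ with
// placeholder predicates (real code reads MemoryChunk flags from the page
// header): a slot only enters the remembered set when a new-space value
// lands in an object whose page is not already scan-on-scavenge, and the
// incremental marker is informed on both paths.
static bool InNewSpaceSketch(void* p) { (void)p; return false; }      // stub
static bool ScanOnScavengeSketch(void* p) { (void)p; return false; }  // stub
static void RememberSlot(void** slot) { (void)slot; }                 // stub
static void InformMarker(void* value) { (void)value; }                // stub

void RecordWriteSlowPath(void* object, void** slot, void* value) {
  InformMarker(value);  // both paths give incremental marking its chance
  if (InNewSpaceSketch(value) && !ScanOnScavengeSketch(object)) {
    RememberSlot(slot);  // the EMIT_REMEMBERED_SET path
  }
}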
6140 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
6141 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
#ifdef _WIN64
Register arg3 = r8;
6144 Register arg2 = rdx;
6145 Register arg1 = rcx;
#else  // AMD64 (System V) calling convention.
6147 Register arg3 = rdx;
6148 Register arg2 = rsi;
6149 Register arg1 = rdi;
#endif
6153 ASSERT(!address.is(regs_.object()));
6154 ASSERT(!address.is(arg1));
6155 __ Move(address, regs_.address());
6156 __ Move(arg1, regs_.object());
6159 __ Move(arg2, address);
6162 __ movq(arg2, Operand(address, 0));
6164 __ LoadAddress(arg3, ExternalReference::isolate_address());
6165 int argument_count = 3;
6167 AllowExternalCallThatCantCauseGC scope(masm);
6168 __ PrepareCallCFunction(argument_count);
6171 ExternalReference::incremental_evacuation_record_write_function(
6177 ExternalReference::incremental_marking_record_write_function(
6181 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
6185 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
6186 MacroAssembler* masm,
6187 OnNoNeedToInformIncrementalMarker on_no_need,
6190 Label need_incremental;
6191 Label need_incremental_pop_object;
6195 __ JumpIfBlack(regs_.object(),
6201 regs_.Restore(masm);
6202 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
6203 __ RememberedSetHelper(object_,
6215 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
6218 Label ensure_not_white;
6220 __ CheckPageFlag(regs_.scratch0(),
6227 __ CheckPageFlag(regs_.object(),
6233 __ bind(&ensure_not_white);
6238 __ push(regs_.object());
6239 __ EnsureNotWhite(regs_.scratch0(),
6242 &need_incremental_pop_object,
6244 __ pop(regs_.object());
6246 regs_.Restore(masm);
6247 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
6248 __ RememberedSetHelper(object_,
6257 __ bind(&need_incremental_pop_object);
6258 __ pop(regs_.object());
6260 __ bind(&need_incremental);
6266 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
6277 Label double_elements;
6279 Label slow_elements;
6280 Label fast_elements;
6282 __ CheckFastElements(rdi, &double_elements);
6285 __ JumpIfSmi(rax, &smi_element);
6286 __ CheckFastSmiElements(rdi, &fast_elements);
6291 __ bind(&slow_elements);
6302 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
6305 __ bind(&fast_elements);
6320 __ bind(&smi_element);
6328 __ bind(&double_elements);
6332 __ StoreNumberToDoubleElements(rax,
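// Annotation: the element-kind dispatch above, condensed; the names are
// illustrative, not V8's API. Smis can be stored without a write barrier,
// object stores need one, doubles are unboxed into the backing store, and
// anything else (including a needed elements transition) defers to the
// runtime.
enum ElementsKindSketch { FAST_SMI_ONLY, FAST, FAST_DOUBLE, OTHER };

const char* StoreArrayLiteralPath(ElementsKindSketch kind, bool value_is_smi) {
  if (kind == FAST_DOUBLE) return "double_elements";  // unbox into the array
  if (value_is_smi) return "smi_element";             // raw store, no barrier
  if (kind == FAST) return "fast_elements";           // store + write barrier
  return "slow_elements";                             // runtime fallback
}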
6344 #endif // V8_TARGET_ARCH_X64