#if defined(V8_TARGET_ARCH_IA32)

#define __ ACCESS_MASM(masm)
  Label check_heap_number, call_builtin;
  __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);

  __ bind(&check_heap_number);
  Factory* factory = masm->isolate()->factory();
  __ cmp(ebx, Immediate(factory->heap_number_map()));

  __ bind(&call_builtin);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->fast_new_closure_total(), 1);

  Factory* factory = masm->isolate()->factory();
  __ mov(ebx, Immediate(factory->empty_fixed_array()));
         Immediate(factory->the_hole_value()));

  Label check_optimized;
  Label install_unoptimized;
  if (FLAG_cache_optimized_code) {
    __ j(not_zero, &check_optimized, Label::kNear);
  }
  __ bind(&install_unoptimized);
         Immediate(factory->undefined_value()));

  __ bind(&check_optimized);
  __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);

  Label install_optimized;
  __ j(equal, &install_optimized);

  __ bind(&install_optimized);
  __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
  __ RecordWriteContextSlot(
  __ jmp(&install_unoptimized);

  __ push(Immediate(factory->false_value()));
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
  Factory* factory = masm->isolate()->factory();
         factory->function_context_map());

  __ Set(ebx, Immediate(0));

  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
  Factory* factory = masm->isolate()->factory();
         factory->block_context_map());

  Label after_sentinel;
  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ Assert(equal, message);
  }
  __ bind(&after_sentinel);

         factory->the_hole_value());
  __ mov(ebx, factory->the_hole_value());
  for (int i = 0; i < slots_; i++) {
    __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
  }

  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
static void GenerateFastCloneShallowArrayCommon(
    MacroAssembler* masm,
  int elements_size = 0;
        : FixedArray::SizeFor(length);

  __ lea(edx, Operand(eax, JSArray::kSize));

      while (i < elements_size) {
      ASSERT(i == elements_size);
  Factory* factory = masm->isolate()->factory();
  __ cmp(ecx, factory->undefined_value());

  Label double_elements, check_fast_elements;
  __ CheckMap(ebx, factory->fixed_cow_array_map(),
  GenerateFastCloneShallowArrayCommon(masm, 0,

  __ bind(&check_fast_elements);
  __ CheckMap(ebx, factory->fixed_array_map(),
  GenerateFastCloneShallowArrayCommon(masm, length_,

  __ bind(&double_elements);

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
      message = "Expected (writable) fixed array";
      expected_map = factory->fixed_array_map();
      message = "Expected (writable) fixed double array";
      expected_map = factory->fixed_double_array_map();
      message = "Expected copy-on-write fixed array";
      expected_map = factory->fixed_cow_array_map();
  }

  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);

  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
  Factory* factory = masm->isolate()->factory();
  __ cmp(ecx, factory->undefined_value());

  __ ret(4 * kPointerSize);

  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
  Factory* factory = masm->isolate()->factory();
  const Register argument = eax;
  const Register map = edx;

  __ mov(argument, Operand(esp, 1 * kPointerSize));

  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

  __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
  if (!tos_.is(argument)) {
    __ mov(tos_, argument);
  }
  __ ret(1 * kPointerSize);

  __ JumpIfSmi(argument, &patch, Label::kNear);

  Label not_undetectable;
  __ j(zero, &not_undetectable, Label::kNear);
  __ Set(tos_, Immediate(0));
  __ ret(1 * kPointerSize);
  __ bind(&not_undetectable);

  __ j(below, &not_js_object, Label::kNear);
  if (!tos_.is(argument)) {
    __ Set(tos_, Immediate(1));
  }
  __ ret(1 * kPointerSize);
  __ bind(&not_js_object);

  __ ret(1 * kPointerSize);
  __ bind(&not_string);

  Label not_heap_number, false_result;
  __ cmp(map, factory->heap_number_map());

  __ j(zero, &false_result, Label::kNear);
  if (!tos_.is(argument)) {
    __ Set(tos_, Immediate(1));
  }
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ Set(tos_, Immediate(0));
  __ ret(1 * kPointerSize);
  __ bind(&not_heap_number);

  GenerateTypeTransition(masm);
  CpuFeatures::Scope scope(SSE2);

  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));
      ExternalReference::store_buffer_overflow_function(masm->isolate()),

  CpuFeatures::Scope scope(SSE2);
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
  const Register argument = eax;

  Label different_value;
  __ CompareRoot(argument, value);

    __ Set(tos_, Immediate(0));
  } else if (!tos_.is(argument)) {
    __ Set(tos_, Immediate(1));
  }
  __ ret(1 * kPointerSize);
  __ bind(&different_value);
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
class FloatingPointHelper : public AllStatic {
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  static void LoadFloatOperands(MacroAssembler* masm,
                                ArgLocation arg_location = ARGS_ON_STACK);

  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  static void CheckFloatOperands(MacroAssembler* masm,

  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,

  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     Label* operand_conversion_failure);

  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,

  static void LoadSSE2Operands(MacroAssembler* masm);

  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
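
// IntegerConvert takes a heap number in the given source register and
// leaves an untagged int32 in ecx.  With SSE3 it can use a single fisttp;
// otherwise it decodes the IEEE-754 exponent and mantissa by hand and
// shifts the mantissa bits into place, which implements ECMA-262 ToInt32
// (truncation modulo 2^32) without touching the FPU rounding mode.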
static void IntegerConvert(MacroAssembler* masm,
                           Label* conversion_failure) {
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  __ mov(scratch2, scratch);
    CpuFeatures::Scope scope(SSE3);
    static const uint32_t kTooBigExponent =
    __ cmp(scratch2, Immediate(kTooBigExponent));
    __ sub(esp, Immediate(sizeof(uint64_t)));
    __ fisttp_d(Operand(esp, 0));
    __ add(esp, Immediate(sizeof(uint64_t)));
    const uint32_t non_smi_exponent =
    __ cmp(scratch2, Immediate(non_smi_exponent));
    __ j(equal, &right_exponent, Label::kNear);
    __ j(less, &normal_exponent, Label::kNear);
    const uint32_t big_non_smi_exponent =
    __ cmp(scratch2, Immediate(big_non_smi_exponent));
    __ mov(scratch2, scratch);
    __ shl(scratch2, big_shift_distance);
    __ shr(ecx, 32 - big_shift_distance);
    __ or_(ecx, scratch2);
    __ test(scratch, scratch);
    __ jmp(&done, Label::kNear);

    __ bind(&normal_exponent);
    const uint32_t zero_exponent =
    __ sub(scratch2, Immediate(zero_exponent));
    __ j(less, &done, Label::kNear);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, scratch2);

    __ bind(&right_exponent);
    __ shl(scratch, shift_distance);
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, scratch);
    __ mov(ecx, scratch2);
    __ jmp(&done, Label::kNear);
    __ sub(ecx, scratch2);
void UnaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name = NULL;
  stream->Add("UnaryOpStub_%s_%s_%s",
void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateHeapNumberStub(masm);
      GenerateGenericStub(masm);
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
      GenerateSmiStubSub(masm);
      GenerateSmiStubBitNot(masm);


void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
                     Label::kNear, Label::kNear, Label::kNear);
  GenerateSmiCodeUndo(masm);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  GenerateSmiCodeBitNot(masm, &non_smi);
  GenerateTypeTransition(masm);


void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label::Distance non_smi_near,
                                     Label::Distance undo_near,
                                     Label::Distance slow_near) {
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);
  __ j(zero, slow, slow_near);
  __ Set(eax, Immediate(0));


void UnaryOpStub::GenerateSmiCodeBitNot(
    MacroAssembler* masm,
    Label::Distance non_smi_near) {
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);


void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
      GenerateHeapNumberStubSub(masm);
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);


void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateSmiCodeUndo(masm);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateTypeTransition(masm);
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());

  Label slow_allocate_heapnumber, heapnumber_allocated;
  __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated, Label::kNear);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ bind(&heapnumber_allocated);


void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());

  __ cmp(ecx, 0xc0000000);
  __ j(sign, &try_float, Label::kNear);

  __ bind(&try_float);
  Label slow_allocate_heapnumber, heapnumber_allocated;
  __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ bind(&heapnumber_allocated);

    CpuFeatures::Scope use_sse2(SSE2);
    __ fild_s(Operand(esp, 0));
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
      GenerateGenericStubSub(masm);
    case Token::BIT_NOT:
      GenerateGenericStubBitNot(masm);


void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
  GenerateHeapNumberCodeSub(masm, &slow);
  GenerateSmiCodeUndo(masm);
  GenerateGenericCodeFallback(masm);


void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  GenerateGenericCodeFallback(masm);
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
    case Token::BIT_NOT:


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),


void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm) {
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
void BinaryOpStub::Generate(MacroAssembler* masm) {
  AllowStubCallsScope allow_stub_calls(masm, true);

  switch (operands_type_) {
      GenerateTypeTransition(masm);
      GenerateSmiStub(masm);
      GenerateInt32Stub(masm);
      GenerateHeapNumberStub(masm);
      GenerateOddballStub(masm);
      GenerateBothStringStub(masm);
      GenerateStringStub(masm);
      GenerateGeneric(masm);


void BinaryOpStub::PrintName(StringStream* stream) {
  const char* overwrite_name;
    default: overwrite_name = "UnknownOverwrite"; break;
  stream->Add("BinaryOpStub_%s_%s_%s",
void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {

  Comment smi_check_comment(masm, "-- Smi check arguments");
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
      __ or_(right, left);
    case Token::BIT_XOR:
    case Token::BIT_AND:
      __ mov(combined, right);
      __ or_(combined, left);
      __ or_(right, left);
  __ JumpIfNotSmi(combined, &not_smis);

  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
    case Token::BIT_XOR:
      __ xor_(right, left);
    case Token::BIT_AND:
      __ and_(right, left);
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis);
      __ test(left, Immediate(0xc0000000));
      __ add(right, left);
      __ sub(left, right);
      __ imul(right, left);
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      __ test(right, right);
      __ j(zero, &use_fp_on_smis);
      __ cmp(eax, 0x40000000);
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      __ test(right, right);
      __ NegativeZeroTest(edx, combined, slow);
    case Token::BIT_AND:
    case Token::BIT_XOR:
      __ ret(2 * kPointerSize);

  if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
    __ bind(&use_fp_on_smis);
    __ sub(right, left);
    __ add(left, right);
    ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
    Comment perform_float(masm, "-- Perform float operation on smis");
    __ bind(&use_fp_on_smis);
    if (op_ == Token::SHR) {
      __ mov(Operand(esp, 1 * kPointerSize), left);
      __ mov(Operand(esp, 2 * kPointerSize), Immediate(0));
      __ fild_d(Operand(esp, 1 * kPointerSize));
      CpuFeatures::Scope use_sse2(SSE2);
      __ mov(Operand(esp, 1 * kPointerSize), left);
      __ fild_s(Operand(esp, 1 * kPointerSize));
    __ ret(2 * kPointerSize);

    Comment perform_float(masm, "-- Perform float operation on smis");
    __ bind(&use_fp_on_smis);
    __ sub(right, left);
    __ add(left, right);
      CpuFeatures::Scope use_sse2(SSE2);
      FloatingPointHelper::LoadSSE2Smis(masm, ebx);
      FloatingPointHelper::LoadFloatSmis(masm, ebx);

  Comment done_comment(masm, "-- Enter non-smi code");
void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
    case Token::BIT_AND:
    case Token::BIT_XOR:
      GenerateRegisterArgsPush(masm);

    GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  __ bind(&call_runtime);
    GenerateTypeTransition(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:
      GenerateTypeTransitionWithSavedArgs(masm);
void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  GenerateAddStrings(masm);
  GenerateTypeTransition(masm);


void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Register left = edx;
  Register right = eax;

  __ JumpIfSmi(left, &call_runtime, Label::kNear);
  __ JumpIfSmi(right, &call_runtime, Label::kNear);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);
void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
        if (op_ == Token::MOD) {
          GenerateRegisterArgsPush(masm);
          __ test(ecx, Immediate(1));
          GenerateHeapResultAllocation(masm, &call_runtime);
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            FloatingPointHelper::ARGS_IN_REGISTERS);
        FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
        if (op_ == Token::MOD) {
          GenerateRegisterArgsPush(masm);
          Label after_alloc_failure;
          GenerateHeapResultAllocation(masm, &after_alloc_failure);
          __ bind(&after_alloc_failure);
          __ jmp(&call_runtime);

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransition(masm);

    case Token::BIT_AND:
    case Token::BIT_XOR:
      GenerateRegisterArgsPush(masm);
      Label non_smi_result;
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
      FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
        case Token::BIT_OR:  __ or_(eax, ecx); break;
        case Token::BIT_AND: __ and_(eax, ecx); break;
        case Token::BIT_XOR: __ xor_(eax, ecx); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
      if (op_ == Token::SHR) {
        __ test(eax, Immediate(0xc0000000));
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      __ ret(2 * kPointerSize);

      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        Label skip_allocation;
                            1 * kPointerSize : 2 * kPointerSize));
        __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
        __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
        __ bind(&skip_allocation);
          CpuFeatures::Scope use_sse2(SSE2);
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
        __ ret(2 * kPointerSize);

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransitionWithSavedArgs(masm);

  __ bind(&call_runtime);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:
void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
    GenerateAddStrings(masm);

  Factory* factory = masm->isolate()->factory();
  __ cmp(edx, factory->undefined_value());
  __ mov(edx, Immediate(factory->nan_value()));
  __ jmp(&done, Label::kNear);
  __ cmp(eax, factory->undefined_value());
  __ mov(eax, Immediate(factory->nan_value()));

  GenerateHeapNumberStub(masm);
void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        GenerateHeapResultAllocation(masm, &call_runtime);
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            FloatingPointHelper::ARGS_IN_REGISTERS);
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ bind(&after_alloc_failure);
        __ jmp(&call_runtime);

      __ bind(&not_floats);
      GenerateTypeTransition(masm);

    case Token::BIT_AND:
    case Token::BIT_XOR:
      GenerateRegisterArgsPush(masm);
      Label non_smi_result;
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
        case Token::BIT_OR:  __ or_(eax, ecx); break;
        case Token::BIT_AND: __ and_(eax, ecx); break;
        case Token::BIT_XOR: __ xor_(eax, ecx); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
      if (op_ == Token::SHR) {
        __ test(eax, Immediate(0xc0000000));
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      __ ret(2 * kPointerSize);

      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        Label skip_allocation;
                            1 * kPointerSize : 2 * kPointerSize));
        __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
        __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
        __ bind(&skip_allocation);
          CpuFeatures::Scope use_sse2(SSE2);
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
        __ ret(2 * kPointerSize);

      __ bind(&not_floats);
      GenerateTypeTransitionWithSavedArgs(masm);

  __ bind(&call_runtime);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:
void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->generic_binary_stub_calls(), 1);

    case Token::BIT_AND:
    case Token::BIT_XOR:
      GenerateRegisterArgsPush(masm);

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        GenerateHeapResultAllocation(masm, &call_runtime);
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            FloatingPointHelper::ARGS_IN_REGISTERS);
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ bind(&after_alloc_failure);
        __ jmp(&call_runtime);
      __ bind(&not_floats);

    case Token::BIT_AND:
    case Token::BIT_XOR:
      Label non_smi_result;
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
        case Token::BIT_OR:  __ or_(eax, ecx); break;
        case Token::BIT_AND: __ and_(eax, ecx); break;
        case Token::BIT_XOR: __ xor_(eax, ecx); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
      if (op_ == Token::SHR) {
        __ test(eax, Immediate(0xc0000000));
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      __ ret(2 * kPointerSize);

      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        Label skip_allocation;
                            1 * kPointerSize : 2 * kPointerSize));
        __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
        __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
        __ bind(&skip_allocation);
          CpuFeatures::Scope use_sse2(SSE2);
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
        __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
    GenerateAddStrings(masm);
    GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
      GenerateRegisterArgsPush(masm);
    case Token::BIT_AND:
    case Token::BIT_XOR:
void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
  Label left_not_string, call_runtime;

  Register left = edx;
  Register right = eax;

  __ JumpIfSmi(left, &left_not_string, Label::kNear);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime, Label::kNear);

  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  __ bind(&call_runtime);
void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                Label* alloc_failure) {
  Label skip_allocation;
      __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear);
      __ bind(&skip_allocation);
      __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
      __ bind(&skip_allocation);


void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  Label runtime_call_clear_stack;
  const bool tagged = (argument_type_ == TAGGED);

  Label input_not_smi;
  __ mov(eax, Operand(esp, kPointerSize));
  __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
  __ sub(esp, Immediate(2 * kPointerSize));
  __ fild_s(Operand(esp, 0));
  __ fst_d(Operand(esp, 0));
  __ jmp(&loaded, Label::kNear);
  __ bind(&input_not_smi);

  Factory* factory = masm->isolate()->factory();
  __ cmp(ebx, Immediate(factory->heap_number_map()));

  CpuFeatures::Scope sse4_scope(SSE4_1);

         Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ mov(eax, Immediate(cache_array));
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ mov(eax, Operand(eax, cache_array_index));
  __ j(zero, &runtime_call_clear_stack);
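
  // The cache lookup below uses hard-coded offsets: a 12-byte element
  // stride, the two 4-byte halves of the double input first, then a
  // 4-byte output pointer.  The CHECK_EQs in the following block verify
  // at stub-generation time that the C++ layout of SubCache::Element
  // still matches those assumptions.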
  { TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);
    CHECK_EQ(0, elem_in0 - elem_start);
  }

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  __ fld_d(Operand(esp, 0));
  __ ret(kPointerSize);

  __ bind(&skip_cache);
  __ fld_d(Operand(esp, 0));
  __ fstp_d(Operand(esp, 0));

  __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);

  __ bind(&runtime_call_clear_stack);
  __ bind(&runtime_call);
  ExternalReference runtime =
      ExternalReference(RuntimeFunction(), masm->isolate());
  __ TailCallExternalReference(runtime, 1, 1);

  __ bind(&runtime_call_clear_stack);
  __ bind(&runtime_call);
  __ CallRuntime(RuntimeFunction(), 1);
  return Runtime::kAbort;
}


  Label in_range, done;
  __ and_(edi, Immediate(0x7ff00000));
  int supported_exponent_limit =
  __ cmp(edi, Immediate(supported_exponent_limit));
  __ j(below, &in_range, Label::kNear);
  __ cmp(edi, Immediate(0x7ff00000));
  Label non_nan_result;

  __ push(Immediate(0x7ff80000));
  __ push(Immediate(0));
  __ fld_d(Operand(esp, 0));
  __ add(esp, Immediate(2 * kPointerSize));
  __ jmp(&done, Label::kNear);

  __ bind(&non_nan_result);

  Label no_exceptions;
  __ test(eax, Immediate(5));
  __ j(zero, &no_exceptions, Label::kNear);
  __ bind(&no_exceptions);
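
  // fsin/fcos only accept arguments with |x| < 2^63, so out-of-range
  // finite inputs are reduced modulo 2*pi with fprem1.  The loop below
  // keeps retrying while the FPU status word's C2 bit (0x400) signals
  // that only a partial remainder was computed.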
  Label partial_remainder_loop;
  __ bind(&partial_remainder_loop);
  __ test(eax, Immediate(0x400));
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 Label* conversion_failure) {
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);

  __ bind(&check_undefined_arg1);
  Factory* factory = masm->isolate()->factory();
  __ cmp(edx, factory->undefined_value());
  __ mov(edx, Immediate(0));

  __ bind(&arg1_is_object);
  __ cmp(ebx, factory->heap_number_map());
  IntegerConvert(masm, edx, use_sse3, conversion_failure);

  __ bind(&load_arg2);
  __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);

  __ bind(&check_undefined_arg2);
  __ cmp(eax, factory->undefined_value());
  __ mov(ecx, Immediate(0));

  __ bind(&arg2_is_object);
  __ cmp(ebx, factory->heap_number_map());
  IntegerConvert(masm, eax, use_sse3, conversion_failure);
void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ SmiUntag(number);
  __ fild_s(Operand(esp, 0));
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi_edx);
  __ bind(&load_smi_eax);
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);

  __ bind(&load_smi_edx);
  __ bind(&load_smi_eax);
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, scratch);

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, scratch);


void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
  __ cvttsd2si(scratch, Operand(xmm0));
  __ cvtsi2sd(xmm2, scratch);
  __ cvttsd2si(scratch, Operand(xmm1));
  __ cvtsi2sd(xmm2, scratch);
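  // The conversion pairs above are a round-trip int32 test: truncate the
  // double to an int32 with cvttsd2si, convert back with cvtsi2sd, and a
  // value is a valid int32 exactly when the re-converted double compares
  // equal to the original.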
void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ JumpIfSmi(scratch, &load_smi_1, Label::kNear);
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ JumpIfSmi(scratch, &load_smi_2, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ fild_s(Operand(esp, 0));
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ fild_s(Operand(esp, 0));
void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));
  __ SmiUntag(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
  Label test_other, done;
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ cmp(scratch, factory->heap_number_map());


void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
  CpuFeatures::Scope use_sse2(SSE2);
  Factory* factory = masm->isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  __ mov(scratch, Immediate(1));
  __ cvtsi2sd(double_result, scratch);

    Label base_is_smi, unpack_exponent;
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
           factory->heap_number_map());
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
           factory->heap_number_map());
    __ movdbl(double_exponent,
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movdbl(double_exponent,

  if (exponent_type_ != INTEGER) {
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x80000000u));
    __ cvtsi2sd(double_scratch, exponent);
    __ ucomisd(double_exponent, double_scratch);

    Label continue_sqrt, continue_rsqrt, not_plus_half;

    __ mov(scratch, Immediate(0x3F000000u));
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_scratch, double_exponent);

    __ mov(scratch, 0xFF800000u);
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_base, double_scratch);
    __ j(carry, &continue_sqrt, Label::kNear);

    __ xorps(double_result, double_result);
    __ subsd(double_result, double_scratch);

    __ bind(&continue_sqrt);
    __ xorps(double_scratch, double_scratch);
    __ addsd(double_scratch, double_base);
    __ sqrtsd(double_result, double_scratch);

    __ bind(&not_plus_half);
    __ subsd(double_scratch, double_result);
    __ ucomisd(double_scratch, double_exponent);
    __ mov(scratch, 0xFF800000u);
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_base, double_scratch);
    __ j(carry, &continue_rsqrt, Label::kNear);

    __ xorps(double_result, double_result);

    __ bind(&continue_rsqrt);
    __ xorps(double_exponent, double_exponent);
    __ addsd(double_exponent, double_base);
    __ sqrtsd(double_exponent, double_exponent);
    __ divsd(double_result, double_exponent);
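
    // Generic double exponents go through the x87 FPU: both operands are
    // spilled to the stack and reloaded with fld_d, the pow computation
    // runs on the FPU stack, and test_b(eax, 0x5F) then inspects the saved
    // status word, bailing to &fast_power_failed (and from there to the C
    // runtime) if any exception flag was raised.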
    Label fast_power_failed;
    __ bind(&fast_power);
    __ movdbl(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));
    __ movdbl(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));

    __ test_b(eax, 0x5F);
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movdbl(double_result, Operand(esp, 0));

    __ bind(&fast_power_failed);
    __ jmp(&call_runtime);
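
  // Integer exponents use exponentiation by squaring: double_scratch is
  // squared each iteration and multiplied into double_result whenever the
  // corresponding exponent bit is set, giving O(log |exponent|) multiplies.
  // A negative exponent is handled afterwards by taking the reciprocal.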
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);
  __ movsd(double_scratch, double_base);
  __ movsd(double_scratch2, double_result);

  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(zero, &while_false, Label::kNear);
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);

  __ bind(&while_false);
  __ test(exponent, exponent);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);

  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  __ cvtsi2sd(double_exponent, exponent);

  Counters* counters = masm->isolate()->counters();
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

  __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
  __ IncrementCounter(counters->math_pow(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(4, scratch);
      ExternalReference::power_double_double_function(masm->isolate()), 4);
  __ fstp_d(Operand(esp, 0));
  __ movdbl(double_result, Operand(esp, 0));
  __ IncrementCounter(counters->math_pow(), 1);
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  __ JumpIfNotSmi(edx, &slow, Label::kNear);
  __ j(equal, &adaptor, Label::kNear);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), edx);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  Label adaptor_frame, try_allocate;
  __ j(equal, &adaptor_frame, Label::kNear);
  __ jmp(&try_allocate, Label::kNear);

  __ bind(&adaptor_frame);
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&try_allocate);

  const int kParameterMapHeaderSize =
  Label no_parameter_map;
  __ j(zero, &no_parameter_map, Label::kNear);
  __ bind(&no_parameter_map);

  Label has_mapped_parameters, copy;
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);

  __ mov(edx, Operand(esp, 4 * kPointerSize));

  Label skip_parameter_map;
  __ j(zero, &skip_parameter_map);
         Immediate(FACTORY->non_strict_arguments_elements_map()));

  Label parameters_loop, parameters_test;
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ bind(&parameters_test);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);
         Immediate(FACTORY->fixed_array_map()));

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ bind(&arguments_test);
  __ j(less, &arguments_loop, Label::kNear);

  __ ret(3 * kPointerSize);

  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Label adaptor_frame, try_allocate, runtime;
  __ j(equal, &adaptor_frame, Label::kNear);

  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  __ bind(&adaptor_frame);
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  Label add_arguments_object;
  __ bind(&try_allocate);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ bind(&add_arguments_object);

  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ j(zero, &done, Label::kNear);

  __ mov(edx, Operand(esp, 2 * kPointerSize));
         Immediate(FACTORY->fixed_array_map()));

  __ mov(ebx, Operand(edx, -1 * kPointerSize));
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));

  __ ret(3 * kPointerSize);

  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;

  Label runtime, invoke_regexp;

  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));

  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ JumpIfSmi(eax, &runtime);
  if (FLAG_debug_code) {
    __ Check(not_zero,
             "Unexpected type for RegExp data, FixedArray expected");
    __ Check(equal,
             "Unexpected type for RegExp data, FixedArray expected");
  }

  __ add(edx, Immediate(2));

  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);

  __ mov(eax, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(eax, &runtime);

  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);

  Factory* factory = masm->isolate()->factory();
  __ cmp(eax, factory->fixed_array_map());

  __ Set(edi, Immediate(0));

  Label seq_ascii_string, seq_two_byte_string, check_code;
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ j(zero, &seq_two_byte_string, Label::kNear);
  __ j(zero, &seq_ascii_string, Label::kNear);

  Label cons_string, external_string, check_encoding;
  __ j(less, &cons_string);
  __ j(equal, &external_string);
  __ jmp(&check_encoding, Label::kNear);

  __ bind(&cons_string);
  __ bind(&check_encoding);
  __ j(zero, &seq_two_byte_string, Label::kNear);

  __ bind(&seq_ascii_string);
  __ Set(ecx, Immediate(1));
  __ jmp(&check_code, Label::kNear);

  __ bind(&seq_two_byte_string);
  __ Set(ecx, Immediate(0));

  __ bind(&check_code);
  __ JumpIfSmi(edx, &runtime);

  __ mov(ebx, Operand(esp, kPreviousIndexOffset));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);
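
  // Nine stack arguments are passed to the native regexp entry point,
  // filled in below: slot 8 holds the isolate, 7 the direct-call flag,
  // 6 the high end of the regexp backtracking stack, 4 the static offsets
  // vector, 3 and 2 the end and start of the subject text (computed
  // differently for ASCII vs. two-byte strings), 1 the previous match
  // index, and 0 the subject string itself.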
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));

  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(

  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  Label setup_two_byte, setup_rest;
  __ j(zero, &setup_two_byte, Label::kNear);
  __ mov(Operand(esp, 3 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), ecx);
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  __ mov(Operand(esp, 3 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), ecx);

  __ bind(&setup_rest);

  __ LeaveApiExitFrame();

  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ add(edx, Immediate(2));

  __ mov(eax, Operand(esp, kLastMatchInfoOffset));

  __ mov(eax, Operand(esp, kSubjectOffset));
  __ RecordWriteField(ebx,
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ RecordWriteField(ebx,

  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  Label next_capture, done;
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ jmp(&next_capture);

  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  __ bind(&external_string);
  if (FLAG_debug_code) {
    __ Assert(zero,
              "external string expected, but not found");
  }
  __ jmp(&seq_two_byte_string);

  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  __ mov(ebx, Operand(esp, kPointerSize * 3));
  __ JumpIfNotSmi(ebx, &slowcase);

  Factory* factory = masm->isolate()->factory();
  __ mov(ecx, Immediate(factory->empty_fixed_array()));

  __ mov(ecx, Operand(esp, kPointerSize * 1));
  __ mov(ecx, Operand(esp, kPointerSize * 2));
  __ mov(ecx, Operand(esp, kPointerSize * 3));

         Immediate(factory->fixed_array_map()));

  __ mov(edx, Immediate(factory->undefined_value()));
  __ sub(ecx, Immediate(1));

  __ ret(3 * kPointerSize);

  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(masm->isolate());
  __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
  __ mov(number_string_cache,
  __ sub(mask, Immediate(1));
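
  // The number-string cache is a FixedArray of (number, string) pairs
  // with a power-of-two number of entries, so after the sub above `mask`
  // holds entries - 1 and a hash is reduced to a cache index with a
  // single and_ instruction.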
  Label smi_hash_calculated;
  Label load_result_from_cache;
  if (object_is_smi) {
    __ mov(scratch, object);
    __ SmiUntag(scratch);
  } else {
    __ JumpIfNotSmi(object, &not_smi, Label::kNear);
    __ mov(scratch, object);
    __ SmiUntag(scratch);
    __ jmp(&smi_hash_calculated, Label::kNear);
           masm->isolate()->factory()->heap_number_map());
    __ and_(scratch, mask);
    Register index = scratch;
    Register probe = mask;
    __ JumpIfSmi(probe, not_found);
    CpuFeatures::Scope fscope(SSE2);
    __ jmp(&load_result_from_cache, Label::kNear);
  }

  __ bind(&smi_hash_calculated);
  __ and_(scratch, mask);
  Register index = scratch;
  __ bind(&load_result_from_cache);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->number_to_string_native(), 1);
void NumberToStringStub::Generate(MacroAssembler* masm) {
  __ mov(ebx, Operand(esp, kPointerSize));
  __ ret(1 * kPointerSize);

  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);


static int NegativeComparisonResult(Condition cc) {
  Label check_unequal_objects;

  if (include_smi_compare_) {
    Label non_smi, smi_done;
    __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  } else if (FLAG_debug_code) {
    __ Assert(not_zero, "Unexpected smi operands.");
  }

  Label not_identical;

  Label check_for_nan;
  __ cmp(edx, masm->isolate()->factory()->undefined_value());
  __ bind(&check_for_nan);

  if (never_nan_nan_ && (cc_ == equal)) {
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(equal, &heap_number, Label::kNear);

    __ bind(&heap_number);
    __ Set(eax, Immediate(0));

  __ bind(&not_identical);

  if (cc_ == equal && strict_) {
    __ sub(ecx, Immediate(0x01));
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(equal, &slow, Label::kNear);

    Label first_non_object;
    __ j(below, &first_non_object, Label::kNear);

    Label return_not_equal;
    __ bind(&return_not_equal);

    __ bind(&first_non_object);
    __ j(equal, &return_not_equal);
    __ j(equal, &return_not_equal);
  }

  if (include_number_compare_) {
    Label non_number_comparison;
      CpuFeatures::Scope use_sse2(SSE2);
      CpuFeatures::Scope use_cmov(CMOV);

      FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
      FloatingPointHelper::CheckFloatOperands(
          masm, &non_number_comparison, ebx);
      FloatingPointHelper::LoadFloatOperand(masm, eax);
      FloatingPointHelper::LoadFloatOperand(masm, edx);

      Label below_label, above_label;
      __ j(below, &below_label, Label::kNear);
      __ j(above, &above_label, Label::kNear);
      __ Set(eax, Immediate(0));

      __ bind(&below_label);
      __ bind(&above_label);

    __ bind(&unordered);

    __ bind(&non_number_comparison);

  Label check_for_strings;
  BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
  BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);

  __ bind(&check_for_strings);
                                         &check_unequal_objects);

  __ Abort("Unexpected fall-through from string comparison");

  __ bind(&check_unequal_objects);
  if (cc_ == equal && !strict_) {
    Label not_both_objects;
    Label return_unequal;
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ bind(&return_unequal);
    __ bind(&not_both_objects);
  }

  builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
  __ JumpIfSmi(object, label);


  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);

  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  Label initialize, done;
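
  // Call-target feedback: the cell starts uninitialized, is set to the
  // called function the first time a call is recorded (monomorphic), and
  // is flipped to the megamorphic sentinel if a later call ever sees a
  // different target.  The compare-and-jump chain below therefore has
  // three outcomes: already recorded, already megamorphic, or needs
  // initializing.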
  __ j(equal, &done, Label::kNear);
  __ j(equal, &done, Label::kNear);
  __ j(equal, &initialize, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ bind(&initialize);
  Isolate* isolate = masm->isolate();
  Label slow, non_function;

  if (ReceiverMightBeImplicit()) {
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
    __ cmp(eax, isolate->factory()->the_hole_value());
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ecx);
    __ bind(&receiver_ok);
  }

  __ JumpIfSmi(edi, &non_function);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  }

  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ cmp(eax, isolate->factory()->the_hole_value());
    __ j(equal, &call_as_function);
    __ InvokeFunction(edi,
    __ bind(&call_as_function);
  }
  __ InvokeFunction(edi,

  if (RecordCallTarget()) {

  __ Set(eax, Immediate(argc_ + 1));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(adaptor, RelocInfo::CODE_TARGET);

  __ bind(&non_function);
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
  Label slow, non_function_call;

  __ JumpIfSmi(edi, &non_function_call);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  }

  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Set(ebx, Immediate(0));
  Handle<Code> arguments_adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
bool CEntryStub::NeedsImmovableCode() {

  return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&


void CodeStub::GenerateStubsAheadOfTime() {


void CodeStub::GenerateFPStubs() {
  Handle<Code> code = save_doubles.GetCode();
  code->set_is_pregenerated(true);
  code->GetIsolate()->set_fp_stubs_generated(true);

  Handle<Code> code = stub.GetCode();
  code->set_is_pregenerated(true);
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool always_allocate_scope) {
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  __ mov(Operand(esp, 0 * kPointerSize), eax);

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  __ mov(Operand(esp, 0 * kPointerSize), edi);
  __ mov(Operand(esp, 1 * kPointerSize), esi);
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  if (FLAG_debug_code) {
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
  }

  Label failure_returned;
  __ j(zero, &failure_returned);

  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, masm->isolate());

  if (FLAG_debug_code) {
    __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    __ j(equal, &okay, Label::kNear);
  }
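
  // Runtime calls report errors by returning a tagged Failure pointer.
  // The checks below decode it: a retry-after-GC failure loops back into
  // the stub, the out-of-memory sentinel takes its dedicated exit, and
  // anything else means an exception is pending in the isolate, which is
  // fetched (and the slot reset to the hole value) before throwing.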
  __ bind(&failure_returned);
  __ j(zero, &retry, Label::kNear);
  __ j(equal, throw_out_of_memory_exception);

  __ mov(eax, Operand::StaticVariable(pending_exception_address));
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  __ cmp(eax, masm->isolate()->factory()->termination_exception());
  __ j(equal, throw_termination_exception);

  __ jmp(throw_normal_exception);
  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,

  __ bind(&throw_out_of_memory_exception);
  Isolate* isolate = masm->isolate();
  ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
  __ mov(Operand::StaticVariable(external_caught), Immediate(false));

  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ mov(Operand::StaticVariable(pending_exception), eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&throw_normal_exception);
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two slots.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
                                masm->isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: store it in the pending exception field and return
  // a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      masm->isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);

  // Invoke: Link this frame into the handler chain.  There's only one
  // handler block in this code object, so its index is 0.
  __ bind(&invoke);
  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  // Clear any pending exceptions.
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));

  // Invoke the function by calling through the JS entry trampoline
  // builtin.  The trampoline cannot be referenced directly because the
  // builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      masm->isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline,
                            masm->isolate());
    __ mov(edx, Immediate(entry));
  }

  // Check if the current stack frame is marked as the outermost JS frame.
  __ bind(&exit);
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress,
      masm->isolate())));

  // Remove the two markers and return.
  __ add(esp, Immediate(2 * kPointerSize));
  __ ret(0);
}
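
// Sketch of the js_entry_sp bookkeeping above: only the outermost entry
// into JS finds the slot zero, claims it, and pushes the OUTERMOST
// marker; nested entries push INNER.  (Marker values are illustrative.)
namespace {

enum EntryFrameMarker { kInnerJsEntryFrame, kOutermostJsEntryFrame };

EntryFrameMarker ClaimEntryFrame(unsigned* js_entry_sp, unsigned fp) {
  if (*js_entry_sp == 0) {
    *js_entry_sp = fp;  // remember the outermost frame pointer
    return kOutermostJsEntryFrame;
  }
  return kInnerJsEntryFrame;  // already inside JS
}

}  // namespace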
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Call site inline check requires arguments in registers.
  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());

  // Fixed register usage throughout the stub.
  Register object = eax;  // Object (lhs).
  Register map = ebx;  // Map of the object.
  Register function = edx;  // Function (rhs).
  Register prototype = edi;  // Prototype of the function.
  Register scratch = ecx;

  // Constants describing the call site code to patch.
  static const int kDeltaToCmpImmediate = 2;
  static const int kDeltaToMov = 8;
  static const int kDeltaToMovImmediate = 9;
  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(masm->isolate());

  // Get the object and function - they are always both needed.
  Label slow, not_js_object;
  if (!HasArgsInRegisters()) {
    __ mov(object, Operand(esp, 2 * kPointerSize));
    __ mov(function, Operand(esp, 1 * kPointerSize));
  }

  // Check that the left hand is a JS object.
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  // If there is a call site cache don't look in the global cache, but do
  // the real lookup and update the call site cache.
  if (!HasCallSiteInlineCheck()) {
    // Look up the function and the map in the instanceof cache.
    Label miss;
    __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
    __ cmp(function, Operand::StaticArray(scratch,
                                          times_pointer_size,
                                          roots_array_start));
    __ j(not_equal, &miss, Label::kNear);
    __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
    __ cmp(map, Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start));
    __ j(not_equal, &miss, Label::kNear);
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(eax, Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start));
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
    __ bind(&miss);
  }

  // Get the prototype of the function.
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  // Update the global instanceof or call site inlined cache with the
  // current map and function. The cached answer will be set when it is
  // known below.
  if (!HasCallSiteInlineCheck()) {
    __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
    __ mov(Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start), map);
    __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
    __ mov(Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start), function);
  } else {
    // The constants for the code patching are based on no push
    // instructions at the call site.
    ASSERT(HasArgsInRegisters());
    // Get return address and delta to inlined map check.
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
    }
    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
    __ mov(Operand(scratch, 0), map);
  }

  // Loop through the prototype chain of the object looking for the
  // function prototype.
  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
  Label loop, is_instance, is_not_instance;
  __ bind(&loop);
  __ cmp(scratch, prototype);
  __ j(equal, &is_instance, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, Immediate(factory->null_value()));
  __ j(equal, &is_not_instance, Label::kNear);
  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ Set(eax, Immediate(0));
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start), eax);
  } else {
    // Get return address and delta to inlined map check.
    __ mov(eax, factory->true_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(0));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ Set(eax, Immediate(Smi::FromInt(1)));
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(Operand::StaticArray(
        scratch, times_pointer_size, roots_array_start), eax);
  } else {
    // Get return address and delta to inlined map check.
    __ mov(eax, factory->false_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(Smi::FromInt(1)));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  // Before null, smi and string value checks, check that the rhs is a
  // function, as for a non-function rhs an exception needs to be thrown.
  __ JumpIfSmi(function, &slow, Label::kNear);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
  __ j(not_equal, &slow, Label::kNear);

  // Null is not instance of anything.
  __ cmp(object, factory->null_value());
  __ j(not_equal, &object_not_null, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null);
  // Smi values are not instances of anything.
  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null_or_smi);
  // String values are not instances of anything.
  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
  __ j(NegateCondition(is_string), &slow, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  if (!ReturnTrueFalseObject()) {
    // Tail call the builtin which returns 0 or 1.
    if (HasArgsInRegisters()) {
      // Push arguments below return address.
      __ pop(scratch);
      __ push(object);
      __ push(function);
      __ push(scratch);
    }
    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
  } else {
    // Call the builtin and convert 0/1 to true/false.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(object);
      __ push(function);
      __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
    }
    Label true_value, done;
    __ test(eax, eax);
    __ j(zero, &true_value, Label::kNear);
    __ mov(eax, factory->false_value());
    __ jmp(&done, Label::kNear);
    __ bind(&true_value);
    __ mov(eax, factory->true_value());
    __ bind(&done);
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
  }
}
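
// The core of the stub is the prototype-chain walk.  A toy C++ model of
// the same loop (Object/Map are stand-ins for the tagged heap objects;
// a null prototype terminates the chain):
namespace {

struct Map;
struct Object { Map* map; };
struct Map { Object* prototype; };

bool IsInstance(Object* object, Object* function_prototype) {
  for (Object* o = object->map->prototype; o != 0; o = o->map->prototype) {
    if (o == function_prototype) return true;  // found on the chain
  }
  return false;  // reached null: not an instance
}

}  // namespace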
int CompareStub::MinorKey() {
  // Encode the parameters in a unique 16 bit value. To avoid duplicate
  // stubs the never-NaN-NaN condition is only taken into account if the
  // condition is equals.
  ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
  return ConditionField::encode(static_cast<unsigned>(cc_))
         | RegisterField::encode(false)  // lhs_ and rhs_ are not used
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_)
         | IncludeSmiCompareField::encode(include_smi_compare_);
}
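
// MinorKey packs the stub parameters into disjoint bit fields of one
// integer.  A minimal sketch of the same encode pattern (field order and
// widths are illustrative; V8's BitField<> template derives the shifts
// and masks from the declared field list):
namespace {

unsigned EncodeMinorKey(unsigned cc, bool strict, bool never_nan_nan,
                        bool include_number_compare,
                        bool include_smi_compare) {
  unsigned key = cc & ((1u << 12) - 1);  // condition in the low 12 bits
  key |= static_cast<unsigned>(strict) << 12;
  key |= static_cast<unsigned>(never_nan_nan) << 13;
  key |= static_cast<unsigned>(include_number_compare) << 14;
  key |= static_cast<unsigned>(include_smi_compare) << 15;
  return key;
}

}  // namespace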
void CompareStub::PrintName(StringStream* stream) {
  const char* cc_name;
  switch (cc_) {
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case less_equal: cc_name = "LE"; break;
    case greater_equal: cc_name = "GE"; break;
    case equal: cc_name = "EQ"; break;
    case not_equal: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }
  bool is_equality = cc_ == equal || cc_ == not_equal;
  stream->Add("CompareStub_%s", cc_name);
  if (strict_ && is_equality) stream->Add("_STRICT");
  if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
  if (!include_number_compare_) stream->Add("_NO_NUMBER");
  if (!include_smi_compare_) stream->Add("_NO_SMI");
}
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  __ JumpIfSmi(object_, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Untag the index.
  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);
}
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast case.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the index
  // is a number, but getting the actual character is too complex (e.g.,
  // when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ SmiTag(index_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  Factory* factory = masm->isolate()->factory();
  __ Set(result_, Immediate(factory->single_character_string_cache()));
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
}
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}
void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (flags_ == NO_STRING_ADD_FLAGS) {
    __ JumpIfSmi(eax, &call_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &call_runtime);

    // First argument is a string, test second.
    __ JumpIfSmi(edx, &call_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &call_runtime);
  } else {
    // Here at least one of the arguments is definitely a string.
    // We convert the one that is not known to be a string.
    if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_RIGHT;
    } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_LEFT;
    }
  }

  // Both arguments are strings.
  // Check if either of the strings are empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  __ test(ecx, ecx);
  __ j(not_zero, &second_not_zero_length, Label::kNear);
  // Second string is empty, result is first string which is already in eax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  __ test(ebx, ebx);
  __ j(not_zero, &both_not_zero_length, Label::kNear);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, ecx);
  // Use the symbol table when adding two one character strings, as it
  // helps later optimizations to return a symbol here.
  __ cmp(ebx, Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime);

  // Get the two characters forming the new string and try to find it in
  // the symbol table.  If it is not found, just allocate a new one.
  Label make_two_character_string, make_two_character_string_no_reload;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string_no_reload, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Allocate a two character string.
  __ bind(&make_two_character_string);
  // Reload the arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
  // ... (reload the two characters into ebx and ecx)
  __ bind(&make_two_character_string_no_reload);
  __ IncrementCounter(counters->string_add_make_two_char(), 1);
  __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime);
  // ... (pack both characters and store them into the new string)
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&longer_than_two);
  // Check if the resulting string will be flat.
  __ cmp(ebx, Immediate(Smi::FromInt(ConsString::kMinLength)));
  __ j(below, &string_add_flat_result);

  // If the result is not supposed to be flat, allocate a cons string
  // object; if both strings are ASCII the result is an ASCII cons string.
  Label non_ascii, allocated, ascii_data;
  // ... (combine the instance types to pick an encoding)
  __ bind(&ascii_data);
  // ... (allocate an ASCII cons string)
  __ bind(&allocated);
  // ... (fill the length, hash and first/second fields of the cons string)
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte.  Check whether both happen
  // to contain only ASCII data; otherwise allocate a two byte cons string
  // and jump back to &allocated.
  // ...

  // Handle creating a flat result from either external or sequential
  // strings.
  Label first_prepared, second_prepared;
  Label first_is_sequential, second_is_sequential;
  __ bind(&string_add_flat_result);
  // ... (test the first string's representation)
  __ j(zero, &first_is_sequential, Label::kNear);
  // Rule out short external strings and load the string resource.
  // ...
  __ jmp(&first_prepared, Label::kNear);
  __ bind(&first_is_sequential);
  // ...
  __ bind(&first_prepared);
  // ... (same dance for the second string)
  __ j(zero, &second_is_sequential, Label::kNear);
  // ...
  __ jmp(&second_prepared, Label::kNear);
  __ bind(&second_is_sequential);
  // ...
  __ bind(&second_prepared);

  // Check whether both strings have the same encoding.
  Label non_ascii_string_add_flat_result, call_runtime_drop_two;
  __ j(zero, &non_ascii_string_add_flat_result);

  // Both strings are ASCII strings: allocate the result, then copy the
  // characters of each argument in turn.
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  // ... (copy first argument's characters)
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  // ... (copy second argument's characters)
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  // ... (copy first argument's characters)
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  // ... (copy second argument's characters)
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Recover stack pointer before jumping to runtime.
  __ bind(&call_runtime_drop_two);
  __ Drop(2);

  // Just jump to runtime to add the two strings.
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
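
// Strategy recap for StringAddStub: an empty operand returns the other
// string, two single characters go through the symbol table, short
// results are copied flat, and longer results become cons strings.  The
// length cutoff, sketched (threshold value assumed to match
// ConsString::kMinLength in this era of the code):
namespace {

bool ShouldMakeConsString(int total_length) {
  const int kConsMinLength = 13;  // assumed ConsString::kMinLength
  return total_length >= kConsMinLength;
}

}  // namespace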
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm, arg, scratch1, scratch2, scratch3, false, &not_cached);
  __ mov(arg, scratch1);
  __ mov(Operand(esp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  // ... (only unwrap JSValue wrappers that are safe for default valueOf)
  __ mov(Operand(esp, stack_offset), arg);

  __ bind(&done);
}
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  // Copy characters one at a time; only used for very short strings.
  Label loop;
  __ bind(&loop);
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(src, Immediate(1));
    __ add(dest, Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(src, Immediate(2));
    __ add(dest, Immediate(2));
  }
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);
}


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are less than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy whole doublewords using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(src, Immediate(1));
  __ add(dest, Immediate(1));
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
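
// The REP variant moves whole 32-bit words with rep movsd and finishes
// the 0..3 trailing bytes one at a time.  Equivalent C++ sketch:
namespace {

void CopyBytes(unsigned char* dest, const unsigned char* src, int count) {
  int dwords = count >> 2;  // whole doublewords, like rep movsd
  for (int i = 0; i < dwords; i++) {
    dest[0] = src[0]; dest[1] = src[1]; dest[2] = src[2]; dest[3] = src[3];
    dest += 4;
    src += 4;
  }
  for (int i = count & 3; i > 0; i--) {  // trailing bytes
    *dest++ = *src++;
  }
}

}  // namespace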
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_probed,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings use a
  // different hash algorithm.  Don't try to look for these in the symbol
  // table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ mov(scratch, c2);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_probed);
  __ bind(&not_array_index);

  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  // ... (pack char 1 into byte 0 and char 2 into byte 1 of chars)

  // Load the symbol table.
  Register symbol_table = c2;
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(masm->isolate());
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_array_start));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(mask, Immediate(1));

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  Register candidate = scratch;  // Scratch register contains candidate.
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, mask);

    // Load the entry from the symbol table.
    // ...

    // If entry is undefined no string with this hash can be found.
    Factory* factory = masm->isolate()->factory();
    __ cmp(candidate, factory->undefined_value());
    __ j(equal, not_found);
    __ cmp(candidate, factory->the_hole_value());
    __ j(equal, &next_probe[i]);

    // ... (skip candidates whose length is not 2)

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ASCII string.
    // ...
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, temp);
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = candidate;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}
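
// Probe pattern used above: index = (hash + offset(i)) & mask, where the
// offsets grow quadratically; undefined ends the search and the-hole is
// skipped.  C++ sketch (the triangular probe offset is assumed to match
// SymbolTable::GetProbeOffset):
namespace {

int ProbeOffset(int n) { return (n + n * n) >> 1; }  // assumed formula

int FindEntry(const unsigned* keys, int capacity,  // capacity: power of 2
              unsigned hash, unsigned key,
              unsigned undefined_sentinel, unsigned hole_sentinel) {
  int mask = capacity - 1;
  for (int i = 0; i < capacity; i++) {
    int index = (hash + ProbeOffset(i)) & mask;
    unsigned candidate = keys[index];
    if (candidate == undefined_sentinel) return -1;  // definitely absent
    if (candidate == hole_sentinel) continue;        // deleted: keep going
    if (candidate == key) return index;
  }
  return -1;
}

}  // namespace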
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = (seed + character) + ((seed + character) << 10);
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(masm->isolate());
    __ mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    __ mov(scratch, Operand::StaticArray(scratch,
                                         times_pointer_size,
                                         roots_array_start));
    __ SmiUntag(scratch);
    __ add(scratch, character);
    __ mov(hash, scratch);
    __ shl(scratch, 10);
    __ add(hash, scratch);
  } else {
    int32_t seed = masm->isolate()->heap()->HashSeed();
    __ lea(scratch, Operand(character, seed));
    __ shl(scratch, 10);
    __ lea(hash, Operand(scratch, character, times_1, seed));
  }
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, character);
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, scratch);
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, scratch);
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ shr(scratch, 11);
  __ xor_(hash, scratch);
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, scratch);

  __ and_(hash, String::kHashBitMask);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(StringHasher::kZeroHash));
  __ bind(&hash_not_zero);
}
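
// The three helpers above implement V8's seeded Jenkins one-at-a-time
// string hash.  A direct C++ rendering (a sketch: the 30-bit mask and the
// zero-hash substitute of 27 mirror String::kHashBitMask and
// StringHasher::kZeroHash as used here):
namespace {

unsigned HashSequentialString(const char* chars, int length, unsigned seed) {
  unsigned hash = seed;
  for (int i = 0; i < length; i++) {  // GenerateHashInit / AddCharacter
    hash += static_cast<unsigned char>(chars[i]);
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;                  // GenerateHashGetHash
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= (1u << 30) - 1;             // keep only the hash field bits
  return hash == 0 ? 27 : hash;       // never return zero
}

}  // namespace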
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: to
  //  esp[8]: from
  //  esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ JumpIfSmi(eax, &runtime);
  // ... (go to runtime unless eax is a string; instance type -> ebx)

  // Calculate length of sub string using the smi values.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ JumpIfNotSmi(ecx, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ JumpIfNotSmi(edx, &runtime);
  __ sub(ecx, edx);
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);
  __ bind(&not_original_string);

  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  // Deal with different string types: update the index if necessary and
  // put the underlying string into edi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  __ test(ebx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  Factory* factory = masm->isolate()->factory();
  __ test(ebx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string.  Check whether it is flat, then fetch the first part;
  // flat cons strings have an empty second part.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
         factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
  // ... (reload the instance type of the first part)
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string.  Fetch parent and adjust start index by offset.
  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
  // ... (reload the instance type of the parent)
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string; just move it to the expected register.
  __ mov(edi, eax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // Short slices are copied instead of sliced.
    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
    __ j(less, &copy_routine);
    // Allocate a sliced string of the appropriate encoding and fill its
    // header fields.
    Label two_byte_slice, set_slice_header;
    __ test(ebx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
    __ bind(&set_slice_header);
    // ... (store length, hash, offset and parent into the new slice)
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // edi: underlying subject string
  // ebx: instance type of underlying subject string
  // edx: adjusted start index (smi)
  // ecx: length (smi)
  // The subject string can only be external or sequential string of
  // either encoding at this point.
  Label two_byte_sequential, runtime_drop_two, sequential_string;
  __ test_b(ebx, kStringRepresentationMask);
  __ j(zero, &sequential_string);

  // Handle external string: rule out short external strings, load the
  // string resource, then treat it as sequential.
  // ...

  __ bind(&sequential_string);
  // Stash away (adjusted) index and (underlying) string.
  __ push(edx);
  __ push(edi);
  __ SmiUntag(ecx);
  __ test_b(ebx, kStringEncodingMask);
  __ j(zero, &two_byte_sequential);

  // Sequential ASCII string.  Allocate the result and copy the characters.
  // ...
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&two_byte_sequential);
  // Sequential two-byte string.  Allocate and copy as above.
  // ...
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  // Drop pushed values on backwards branch to runtime.
  __ bind(&runtime_drop_two);
  __ Drop(2);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
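
// Result strategy for SubStringStub: a substring spanning the whole
// string returns the original; with FLAG_string_slices, results at or
// above the slice threshold become SlicedStrings over the unpacked
// underlying string; everything shorter is copied.  Sketch of the
// choice (threshold value assumed from SlicedString::kMinLength):
namespace {

bool ShouldSlice(int result_length, bool string_slices_enabled) {
  const int kSliceMinLength = 13;  // assumed SlicedString::kMinLength
  return string_slices_enabled && result_length >= kSliceMinLength;
}

}  // namespace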
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}
void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  __ j(not_zero, &result_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  Label result_greater;
  __ bind(&result_not_equal);
  __ j(greater, &result_greater, Label::kNear);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}
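
// What the stub computes, in plain C++: compare the first
// min(left, right) characters, then fall back to the length difference.
// Sketch:
namespace {

int CompareFlatAscii(const char* left, int left_len,
                     const char* right, int right_len) {
  int min_length = left_len < right_len ? left_len : right_len;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) return left[i] < right[i] ? -1 : 1;
  }
  if (left_len == right_len) return 0;   // EQUAL
  return left_len < right_len ? -1 : 1;  // LESS / GREATER
}

}  // namespace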
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that the loop ends when the index reaches zero,
  // which doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ inc(index);
  __ j(not_zero, &loop);
}
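
// The loop above uses the end-relative indexing trick: both string
// cursors point at the end of the character data and the index runs from
// -length up to zero, so reaching zero ends the loop with no extra bound
// check.  C++ equivalent:
namespace {

bool RangesEqual(const char* left_end, const char* right_end, int length) {
  for (int index = -length; index != 0; index++) {
    if (left_end[index] != right_end[index]) return false;
  }
  return true;
}

}  // namespace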
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  Label not_same;
  __ cmp(edx, eax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ASCII strings.
  // Drop arguments from the stack.
  __ pop(ecx);
  __ add(esp, Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  Label miss;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, edx);
  } else {
    // ... (compute edx - eax and correct the sign on overflow)
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &generic_stub, Label::kNear);

  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &maybe_undefined2, Label::kNear);

  // Inline the double comparison, falling back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatures::Scope scope1(SSE2);
    CpuFeatures::Scope scope2(CMOV);

    // Load and compare both operands; if unordered (NaN) jump to
    // &unordered, otherwise materialize -1/0/1 in eax via cmov.
    // ...
    __ ret(0);
  }

  __ bind(&unordered);
  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    // ... (undefined compares unordered against a heap number)
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SYMBOLS);
  ASSERT(GetCondition() == equal);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are symbols.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ and_(tmp1, tmp2);
  __ test(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &miss, Label::kNear);

  // Symbols are compared by identity.
  Label done;
  __ cmp(left, right);
  // eax (the right operand) is guaranteed to be non-zero here.
  __ j(not_equal, &done, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRINGS);
  Label miss;

  bool equality = Token::IsEqualityOp(op_);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings.
  // ... (load and test both instance types)

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are symbols. If they are, we're done because
  // we already know they are not identical; for non-equality compares we
  // still need to determine the order.
  Label do_compare;
  if (equality) {
    __ and_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsSymbolMask));
    __ j(zero, &do_compare, Label::kNear);
    // eax is guaranteed to be non-zero here (it holds the right operand).
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  if (equality) {
    StringCompareStub::GenerateFlatAsciiStringEquals(
        masm, left, right, tmp1, tmp2);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(
        masm, left, right, tmp1, tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  // ... (push left and right below the return address)
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);
  // ... (both operands must be JS objects; equality is object identity)

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(ebx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Call the CompareIC miss handler, which computes the new IC state and
  // returns the code object of the patched stub to jump to.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
                                             masm->isolate());
  // ... (save edx/eax, push them with the op as arguments, then call)
  __ CallExternalReference(miss, 3);
  // ... (compute the entry point of the returned code object and jump)
}
void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register properties,
                                                        Handle<String> name,
                                                        Register r0) {
  ASSERT(name->IsSymbol());

  // If the names of slots 1 to kProbes - 1 for the hash value are not
  // equal to the name and the kProbes-th slot is unused (its name is the
  // undefined value), the hash table cannot contain the property.  This
  // holds even if some slots represent deleted properties (their names
  // are the null value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    // ... (load the candidate key into entity_name)
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<String>(name));
    __ j(equal, miss);

    Label the_hole;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &the_hole, Label::kNear);

    // ... (bail out to miss if the candidate key is not a symbol)
    __ bind(&the_hole);
  }

  // Fall back to the probing stub for the remaining probes.
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  // ... (call the NEGATIVE_LOOKUP stub and branch on its result)
}
void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register r0,
                                                        Register r1) {
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));
  ASSERT(!name.is(r0));
  ASSERT(!name.is(r1));

  __ AssertString(name);

  // Compute the capacity mask.
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
    __ shr(r0, String::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // ... (fall back to the probing stub for the remaining probes)
}
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: StringDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ mov(scratch, Operand(dictionary_,
                            index_,
                            times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a non symbol key during negative lookup we have to
      // bail out, as this key might equal the key we are looking for.
      // ... (check the symbol bit of the candidate's instance type)
      __ j(zero, &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup, probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result_, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}
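
// Slot math used by the dictionary probes above: the masked quadratic
// probe picks an entry, and the entry index is scaled by the entry size
// (3 words: key, value, details) with lea index, [index + index*2].
// Sketch:
namespace {

int EntryToWordOffset(unsigned hash, int probe_offset, int capacity_mask) {
  const int kEntrySize = 3;  // StringDictionary::kEntrySize
  int entry = static_cast<int>((hash + probe_offset) & capacity_mask);
  return entry * kEntrySize;  // the lea computes entry + entry * 2
}

}  // namespace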
struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;
  RememberedSetAction action;
};


#define REG(Name) { kRegister_ ## Name ## _Code }

static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
  // ... (the register triples used by the pregenerated stubs)
  // Null termination.
  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};

#undef REG


bool RecordWriteStub::IsPregenerated() {
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&
        save_fp_regs_mode_ == kDontSaveFPRegs) {
      return true;
    }
  }
  return false;
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
  stub1.GetCode()->set_is_pregenerated(true);

  CpuFeatures::TryForceFeatureScope scope(SSE2);
  if (CpuFeatures::IsSupported(SSE2)) {
    StoreBufferOverflowStub stub2(kSaveFPRegs);
    stub2.GetCode()->set_is_pregenerated(true);
  }
}


void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    RecordWriteStub stub(entry->object,
                         entry->value,
                         entry->address,
                         entry->action,
                         kDontSaveFPRegs);
    stub.GetCode()->set_is_pregenerated(true);
  }
}
bool CodeStub::CanUseFPRegisters() {
  return CpuFeatures::IsSupported(SSE2);
}
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY, so the
  // two branches above are patched to nops until marking starts.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  if (mode == INCREMENTAL_COMPACTION) {
    __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  } else {
    ASSERT(mode == INCREMENTAL);
    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0());  // Value.
  }
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));

  AllowExternalCallThatCantCauseGC scope(masm);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Decrement the page's write barrier counter; when it goes negative we
  // need to inform the incremental marker.
  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Look at the color of the object: if it is not black we don't have to
  // inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : element value to store
  //  -- ebx    : array literal
  //  -- edi    : map of array literal
  //  -- ecx    : element index as smi
  //  -- edx    : array literal index in function
  //  -- esp[0] : return address
  // -----------------------------------

  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label slow_elements_from_double;
  Label fast_elements;

  __ CheckFastElements(edi, &double_elements);

  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements.
  __ JumpIfSmi(eax, &smi_element);
  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);

  // Store into the array literal requires an elements transition. Call
  // into the runtime.
  __ bind(&slow_elements);
  // ... (push the call arguments, keeping the return address on top)
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&slow_elements_from_double);
  __ pop(edx);
  __ jmp(&slow_elements);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an
  // object: store it and update the write barrier.
  __ bind(&fast_elements);
  // ...

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is a Smi: no write barrier is needed.
  __ bind(&smi_element);
  // ...

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ push(edx);
  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ StoreNumberToDoubleElements(eax,
                                 edx,
                                 ecx,
                                 edi,
                                 xmm0,
                                 &slow_elements_from_double,
                                 false);
  __ pop(edx);
  __ ret(0);
}
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (entry_hook_ != NULL) {
    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  // ...

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, kPointerSize));
  __ push(eax);

  // Calculate the function address, call the entry hook and then drop the
  // two pushed arguments.
  // ...
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved registers and return.
  // ...
  __ ret(0);
}
7536 #endif // V8_TARGET_ARCH_IA32