#if V8_TARGET_ARCH_X64
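// Platform-specific code stubs for x64. Most stubs below describe their
// calling convention via a CodeStubInterfaceDescriptor: which registers
// carry their parameters and which runtime entry handles a deopt bailout.
// ... (copyright header, includes and namespace opening elided)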
void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNewClosureFromStubFailure)->entry;
}


void FastNewContextStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdi };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx, rdx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}
void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx, rbx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
}
void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void StringLengthStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rcx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStringLengthStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}
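// Descriptors for the Array/InternalArray constructor stubs. rdi carries
// the constructor function, rbx the AllocationSite feedback, and rax the
// argument count whenever it is not a compile-time constant.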
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { rdi, rbx, rax };
  static Register registers_no_args[] = { rdi, rbx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // Stack parameter count is passed in rax.
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenArrayConstructor)->entry;
}
static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { rdi, rax };
  static Register registers_no_args[] = { rdi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // Stack parameter count is passed in rax.
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenInternalArrayConstructor)->entry;
}
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}

void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}

void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}

void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}

void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}

void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}
void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}
void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}
void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx, rdx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
}


void StringAddStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenStringAdd)->entry;
}
void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ArgumentAdaptorCall);
    static Register registers[] = { rdi,  // JSFunction
                                    rsi,  // context
                                    rax,  // actual number of arguments
                                    rbx,  // expected number of arguments
    };
    static Representation representations[] = {
        Representation::Tagged(),     // JSFunction
        Representation::Tagged(),     // context
        Representation::Integer32(),  // actual number of arguments
        Representation::Integer32(),  // expected number of arguments
    };
    descriptor->register_param_count_ = 4;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::KeyedCall);
    static Register registers[] = { rsi,  // context
                                    rcx,  // key
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // key
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::NamedCall);
    static Register registers[] = { rsi,  // context
                                    rcx,  // name
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // name
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::CallHandler);
    static Register registers[] = { rsi,  // context
                                    rdx,  // receiver
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // receiver
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ApiFunctionCall);
    static Register registers[] = { rax,  // callee
                                    rbx,  // call_data
                                    rcx,  // holder
                                    rdx,  // api_function_address
                                    rsi,  // context
    };
    static Representation representations[] = {
        Representation::Tagged(),    // callee
        Representation::Tagged(),    // call_data
        Representation::Tagged(),    // holder
        Representation::External(),  // api_function_address
        Representation::Tagged(),    // context
    };
    descriptor->register_param_count_ = 5;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
}
#define __ ACCESS_MASM(masm)
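// A Hydrogen stub that misses falls back to the runtime: the handler below
// pushes the stub's register parameters inside an internal frame and calls
// the external reference recorded in the interface descriptor.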
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           rax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ Ret();
}
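// StoreBufferOverflowStub spills the caller-saved registers (plus the XMM
// registers when save_doubles_ is set) around a C call that drains the
// store buffer.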
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles_);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(masm->isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles_);
  __ ret(0);
}
class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Takes the operands in rdx and rax and loads them as doubles in xmm0
  // and xmm1.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};
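// DoubleToIStub performs a truncating double-to-int32 conversion entirely
// in generated code: exponents small enough for cvttsd2siq take the fast
// path, larger ones shift the mantissa manually and fix up the sign.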
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();
  // ... (offset adjustment when input is rcx elided)
  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if rcx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save the register we use in its stead for the result, if needed.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);
  // ... (exponent extraction and range check elided)

  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}
void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}
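// MathPowStub computes base^exponent. Integer exponents use binary
// exponentiation (repeated squaring); exponents of +/-0.5 get sqrt-based
// shortcuts; the general case uses the FPU and finally falls back to the C
// function power_double_double.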
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as doubles.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5: load double_scratch with 0.5.
      // ...
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculate the square root of the base.  Check for the special case
      // of Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // ...
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry
      // flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculate the reciprocal of the square root of the base.  Check for
      // the special case of Math.pow(-Infinity, -0.5) == 0 (ECMA 15.8.2.13).
      // ...
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    // ... (transfer base and exponent onto the FPU stack:)
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E
    // ... (fyl2x / f2xm1 / fscale sequence and status check elided)
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    // ...
    __ jmp(&done);

    __ bind(&fast_power_failed);
    // ... (FPU reset and stack cleanup elided)
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  // ... (negation of a negative exponent elided)

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the bit that has
  // been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register.  Exponent is in xmm1.
    __ movsd(xmm0, double_base);
    ASSERT(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()),
          2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
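// FunctionPrototypeStub loads a function's "prototype" property, deferring
// to the IC miss builtin for anything unexpected.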
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ... (keyed load convention: rax holds the name, rdx the receiver)
    __ Cmp(rax, masm->isolate()->factory()->prototype_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ... (load convention elided)
    receiver = rax;
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
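// The ArgumentsAccessStub variants read an element out of the caller's
// actual arguments, or materialize sloppy/strict arguments objects. They
// inspect the caller frame to handle arguments adaptor frames.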
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);
  // ... (bounds check against the caller frame and element load elided)
  __ movp(rax, args.GetArgumentOperand(0));
  __ Ret();
  // ... (arguments adaptor frame case:)
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: handle non-smi or out-of-bounds access to arguments by
  // calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  Factory* factory = masm->isolate()->factory();

  Label runtime;
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate;
  // ...
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
      Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // ...
  __ movp(args.GetArgumentOperand(1), rdx);
  // ... (mapped parameter count = min(rbx, rcx) elided)

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  // ...
  __ j(zero, &no_parameter_map, Label::kNear);
  // ...
  __ bind(&no_parameter_map);
  // ... (allocation of all objects in one go elided)

  // Get the arguments boilerplate from the current native context.
  Label has_mapped_parameters, copy;
  // ...
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  // ...
  __ jmp(&copy, Label::kNear);
  __ bind(&has_mapped_parameters);
  // ... (boilerplate copy and callee setup elided)
  __ movp(rdx, args.GetArgumentOperand(0));
  // ...

  // Set up the elements pointer in the allocated arguments object.
  Label skip_parameter_map;
  // ...
  __ j(zero, &skip_parameter_map);
  // ...
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  // ...

  // Copy the parameter slots and the holes in the arguments.
  Label parameters_loop, parameters_test;
  // ...
  __ addp(r8, args.GetArgumentOperand(2));
  // ...
  __ Move(r11, factory->the_hole_value());
  // ...
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  // ... (stores into the parameter map at kParameterMapHeaderSize elided)
  __ bind(&parameters_test);
  // ...
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  // ...

  Label arguments_loop, arguments_test;
  // ...
  __ movp(rdx, args.GetArgumentOperand(1));
  // ...
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  // ...
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  // ...
  __ j(less, &arguments_loop, Label::kNear);

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ movp(args.GetArgumentOperand(2), rcx);  // Patch the argument count.
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function
  Label runtime;
  // ... (adaptor-frame check elided; on an adaptor frame:)
  __ movp(args.GetArgumentOperand(2), rcx);  // Patch the argument count.
  // ...
  __ movp(args.GetArgumentOperand(1), rdx);  // Patch the receiver slot.

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  // ...
  __ movp(rcx, args.GetArgumentOperand(2));
  // ...
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  // ...
  __ movp(args.GetArgumentOperand(2), rcx);
  // ...
  __ movp(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation. Start out with computing the size of the
  // arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  // ...
  __ j(zero, &add_arguments_object, Label::kNear);
  // ...
  __ bind(&add_arguments_object);
  // ... (allocation and boilerplate lookup elided)
  __ movp(rdi, Operand(rdi, offset));
  // ...

  // Get the length (smi tagged) and set that as an in-object property too.
  __ movp(rcx, args.GetArgumentOperand(2));
  // ... (if there are no actual arguments, we're done)

  // Copy the fixed array slots.
  __ movp(rdx, args.GetArgumentOperand(1));
  // ... (copy loop:)
  __ movp(rbx, Operand(rdx, -1 * kPointerSize));
  // ...
  __ addp(rdi, Immediate(kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  // ...

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
}
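// RegExpExecStub runs a compiled regexp against a subject string: it unwraps
// cons/sliced/external strings down to a sequential string, calls the
// generated irregexp code directly, and fills in the last-match info. Any
// unusual case bails out to the runtime.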
void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at
  // compile time.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);
  // ...

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  // ...

  // Check that the RegExp has been compiled (data contains a fixed array).
  if (FLAG_debug_code) {
    __ Check(NegateCondition(masm->CheckSmi(rax)),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }
  // ...

  // Check that the number of captures fits in the static offsets vector.
  __ SmiToInteger32(rdx,
      FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // ...

  // Check that the second argument is a string.
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  // ...

  Label seq_one_byte_string, seq_two_byte_string, external_string,
      check_underlying, not_seq_nor_cons, check_code, not_long_external;
  // ... (string shape dispatch elided)
  __ j(zero, &seq_two_byte_string);
  // ...
  __ j(zero, &seq_one_byte_string, Label::kNear);

  // Cons string: check whether it is flat, then fetch the first part.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  // ...
  __ bind(&check_underlying);
  // ...
  __ j(zero, &seq_two_byte_string);
  // ...

  // Sequential one-byte string: load regexp code for one-byte strings.
  __ bind(&seq_one_byte_string);
  // ...
  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. A smi means the code wasn't generated.
  __ JumpIfSmi(r11, &runtime);
  // ...

  // Load the previous index as an untagged integer.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  // ...

  // All checks done. Now push arguments for native regexp code.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(masm->isolate()));
  // ... (arguments 8 down to 5 elided)

  // Argument 2: the static offsets vector buffer.
  __ LoadAddress(rcx,
      ExternalReference::address_of_static_offsets_vector(isolate));
  // ...

  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // ... (start/end pointer computation per encoding:)
  __ j(zero, &setup_two_byte, Label::kNear);
  // ...
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  // ...
  __ bind(&setup_rest);
  // ... (call into the generated regexp code elided)

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success, exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the regexp to be non-global.
  __ j(equal, &success, Label::kNear);
  // ... (exception and retry checks elided)

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  // ...
  __ SmiToInteger32(rax,
      FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // ...

  // Check that the last match info is a FixedArray allowing the number of
  // captures.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  // ...
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  // ...

  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  // ...
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax, rdi, kDontSaveFPRegs);
  // ...
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax, rdi, kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(rcx,
      ExternalReference::address_of_static_offsets_vector(isolate));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  // ... (store each capture into the match info elided)
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already
  // a stack overflow (on the backtrack stack) was detected in RegExp code
  // but the exception hasn't been created yet. Handle that in the runtime.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate);
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);
  __ movp(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ j(equal, &termination_exception, Label::kNear);
  __ Throw(rax);

  __ bind(&termination_exception);
  __ ThrowUncatchable(rax);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);

  // Deferred code for string handling.
  // Not a long external string? If yes, bail out to runtime checks below.
  __ bind(&not_seq_nor_cons);
  // ...
  __ j(greater, &not_long_external, Label::kNear);

  // External string: make it, offset-wise, look like a sequential string.
  __ bind(&external_string);
  // ...
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // ...

  // Two-byte sequential string: load the regexp code and retry.
  __ bind(&seq_two_byte_string);
  // ...
  __ jmp(&check_code);

  // Short external string or not a string: bail out to runtime.
  __ bind(&not_long_external);
  // ...
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}
static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm,
                           Register input,
                           CompareIC::State expected,
                           Label* fail) {
  Label ok;
  if (expected == CompareIC::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareIC::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
           Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
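// The generic comparison stub: handles identical operands, smis, heap
// numbers, internalized strings and flat ASCII strings inline, then falls
// back to the EQUALS/STRICT_EQUALS/COMPARE builtins.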
void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = masm->isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left_, &miss);
  CheckInputType(masm, rax, right_, &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
  // ... (smi subtraction and result elided)
  __ bind(&non_smi);

  // Two identical objects are equal unless they are both NaN or undefined.
  Label not_identical;
  // ...
  if (cc != equal) {
    // Check for undefined: undefined OP undefined is false even though
    // undefined == undefined.
    Label check_for_nan;
    __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &check_for_nan, Label::kNear);
    __ Set(rax, NegativeComparisonResult(cc));
    __ ret(0);
    __ bind(&check_for_nan);
  }

  // Test for NaN: compare heap numbers in a general way to hit that case.
  Label heap_number;
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);
  // ...
  __ bind(&heap_number);
  // ... (NaN check elided)
  __ bind(&not_identical);

  if (cc == equal) {  // Strict or non-strict equality.
    // ... (strict-equality fast cases elided; these also compare the map
    //      against factory->heap_number_map())

    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    __ j(below, &first_non_object, Label::kNear);
    // Return non-zero: rax is already a non-zero heap object.
    Label return_not_equal;
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ j(equal, &return_not_equal);
    // ...
    __ j(equal, &return_not_equal);
    // ...
  }

  // Inline number comparison handling any combination of smis and heap
  // numbers if possible.
  Label non_number_comparison, unordered;
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
  // ... (ucomisd and flag-based result elided)

  // If one of the numbers was NaN, the result is always false.
  __ bind(&unordered);
  // ...

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rdx, kScratchRegister);
    // Both are internalized and not identical, hence not equal.
    __ ret(0);
  }

  __ bind(&check_for_strings);
  __ JumpIfNotBothSequentialAsciiStrings(
      rdx, rax, rcx, rbx, &check_unequal_objects);
  // ... (flat ASCII string comparison elided)

#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Not strict equality: objects are unequal if both are JSObjects and
    // neither is undetectable.
    Label not_both_objects, return_unequal;
    // ...
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    // ...
    __ j(zero, &return_unequal, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    // ...
    __ bind(&return_unequal);
    __ ret(0);
    __ bind(&not_both_objects);
  }

  // Push arguments below the return address to prepare jump to builtin.
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rax);

  // Figure out which native to call and setup the arguments.
  Builtins::JavaScript builtin;
  if (cc == equal) {
    builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
  }

  __ PushReturnAddressFrom(rcx);
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);

  __ bind(&miss);
  GenerateMiss(masm);
}
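// Records the call target in the type feedback vector slot: a monomorphic
// function, the megamorphic sentinel, or an AllocationSite when the target
// is the Array constructor.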
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.
  //  rax : number of arguments to the construct function
  //  rbx : feedback vector
  //  rdx : slot in feedback vector (smi)
  //  rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function,
      done_no_smi_convert;

  // ... (load of the cache state and the monomorphic hit check elided)

  if (!FLAG_pretenuring_call_new) {
    // If we came here, we need to see if we are the array function.
    // If we didn't have a matching function, and we didn't find the
    // megamorphic sentinel, then we have in the slot either some other
    // function or an AllocationSite.
    Handle<Map> allocation_site_map =
        masm->isolate()->factory()->allocation_site_map();
    // ...
  }

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ bind(&megamorphic);
  // ... (store the megamorphic sentinel elided)

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);

  if (!FLAG_pretenuring_call_new) {
    // Make sure the function is the Array() function, then create an
    // AllocationSite in the slot.
    // ...
    CreateAllocationSiteStub create_stub;
    __ CallStub(&create_stub);
    // ...
    __ jmp(&done_no_smi_convert);

    __ bind(&not_array_function);
  }

  // ... (store rdi into the slot with write barrier, and &done, elided)
  __ bind(&done_no_smi_convert);
}
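// CallFunctionStub invokes rdi as a function, wrapping the receiver for
// sloppy-mode targets and routing proxies and non-functions through the
// CALL_FUNCTION_PROXY / CALL_NON_FUNCTION builtins.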
void CallFunctionStub::Generate(MacroAssembler* masm) {
  // rbx : feedback vector
  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
  //       vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label slow, non_function, wrap, cont;
  StackArgumentsAccessor args(rsp, argc_);

  if (NeedsChecks()) {
    // Check that the function really is a JavaScript function.
    __ JumpIfSmi(rdi, &non_function);

    // Goto slow case if we do not have a function.
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &slow);

    if (RecordCallTarget()) {
      GenerateRecordCallTarget(masm);
      // Type information was updated. Because we may call Array, which
      // expects either undefined or an AllocationSite in rbx, we set rbx
      // to undefined.
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
    }
  }

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);

  if (CallAsMethod()) {
    if (NeedsChecks()) {
      // ... (sloppy/native mode checks elided)
    }

    // Load the receiver from the stack.
    __ movp(rax, args.GetReceiverOperand());

    if (NeedsChecks()) {
      __ JumpIfSmi(rax, &wrap);
      // ... (object type check elided)
    } else {
      __ jmp(&wrap);
    }

    __ bind(&cont);
  }
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

  if (NeedsChecks()) {
    // Slow-case: Non-function called.
    __ bind(&slow);
    if (RecordCallTarget()) {
      // ... (reset the cache to the megamorphic sentinel elided)
    }
    // Check for function proxy.
    // ...
    __ PopReturnAddressTo(rcx);
    __ Push(rdi);  // put proxy as additional argument under return address
    __ PushReturnAddressFrom(rcx);
    __ Set(rax, argc_ + 1);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    {
      Handle<Code> adaptor =
          masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
      __ jmp(adaptor, RelocInfo::CODE_TARGET);
    }

    // CALL_NON_FUNCTION expects the non-function callee as receiver
    // (instead of the original receiver from the call site).
    __ bind(&non_function);
    __ movp(args.GetReceiverOperand(), rdi);
    __ Set(rax, argc_);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    Handle<Code> adaptor =
        isolate->builtins()->ArgumentsAdaptorTrampoline();
    __ Jump(adaptor, RelocInfo::CODE_TARGET);
  }

  if (CallAsMethod()) {
    __ bind(&wrap);
    // Wrap the receiver and patch it back onto the stack.
    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
      __ Push(rdi);
      __ Push(rax);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ Pop(rdi);
    }
    __ movp(args.GetReceiverOperand(), rax);
    __ jmp(&cont);
  }
}
void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
  // rdi : constructor function
  Label slow, non_function_call;

  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

    if (FLAG_pretenuring_call_new) {
      // ... (load the pretenuring AllocationSite from the vector elided)
    } else {
      Label feedback_register_initialized;
      // Put the AllocationSite from the feedback vector into rbx, or
      // undefined if the slot doesn't hold one.
      // ...
      __ j(equal, &feedback_register_initialized);
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      __ bind(&feedback_register_initialized);
    }

    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Jump to the function-specific construct stub.
  Register jmp_reg = rcx;
  // ... (load of the construct stub and jump through jmp_reg elided)

  // rdi: called object
  // rax: number of arguments
  // rcx: object map
  Label do_call;
  __ bind(&slow);
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ jmp(&do_call);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ bind(&do_call);
  // Set expected number of arguments to zero (not changing rax).
  __ Set(rbx, 0);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  // ... (other ahead-of-time stubs elided)
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  CEntryStub stub(1, kDontSaveFPRegs);
  stub.GetCode(isolate);
  CEntryStub save_doubles(1, kSaveFPRegs);
  save_doubles.GetCode(isolate);
}
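// CEntryStub implements the transition from generated code into C++ runtime
// functions. GenerateCore makes the actual call and is emitted three times:
// plain, with GC, and with GC plus forced allocation.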
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              bool do_gc,
                              bool always_allocate_scope) {
  // rax: result parameter for PerformGC, if any.
  // rbx: pointer to C function  (C callee-saved).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: pointer to the first argument (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. The stack is known to be aligned, so no
    // PrepareCallCFunction is needed here.
    __ Move(arg_reg_2,
            ExternalReference::isolate_address(masm->isolate()));
    __ movp(arg_reg_1, rax);
    __ Move(kScratchRegister,
            ExternalReference::perform_gc_function(masm->isolate()));
    __ call(kScratchRegister);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
    __ incl(scope_depth_operand);
  }

  // Call C function.
#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
  if (result_size_ < 2) {
    // Return result in single register (rax): pass argc and argv.
    __ movp(rcx, r14);  // argc.
    __ movp(rdx, r15);  // argv.
    __ Move(r8, ExternalReference::isolate_address(masm->isolate()));
  } else {
    ASSERT_EQ(2, result_size_);
    // Pass a pointer to the result location as the first argument.
    // ...
    __ Move(r9, ExternalReference::isolate_address(masm->isolate()));
  }

#else  // _WIN64
  // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
  __ movp(rdi, r14);  // argc.
  __ movp(rsi, r15);  // argv.
  __ Move(rdx, ExternalReference::isolate_address(masm->isolate()));
#endif
  __ call(rbx);
  // Result is in rax - do not destroy this register!

  if (always_allocate_scope) {
    Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
    __ decl(scope_depth_operand);
  }

  // Check for failure result.
  Label failure_returned;
  // ... (failure tag test elided)
#ifdef _WIN64
  // If return value is on the stack, pop it to registers.
  if (result_size_ > 1) {
    // ...
  }
#endif
  __ j(zero, &failure_returned);

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC continue at retry label.
  // ...
  __ j(zero, &retry, Label::kNear);

  // Retrieve the pending exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, masm->isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address);
  __ movp(rax, pending_exception_operand);

  // Clear the pending exception.
  pending_exception_operand =
      masm->ExternalOperand(pending_exception_address);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ movp(pending_exception_operand, rdx);

  // Special handling of termination exceptions, which are uncatchable by
  // JavaScript code.
  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry to execute the function.
  __ bind(&retry);
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // ... (register setup elided)
#ifdef _WIN64
  int arg_stack_space = (result_size_ < 2 ? 2 : 4);
#else
  int arg_stack_space = 0;
#endif
  __ EnterExitFrame(arg_stack_space, save_doubles_);

  Label throw_normal_exception;
  Label throw_termination_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  // ...
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               true,
               true);

  { FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(0);
    __ CallCFunction(
        ExternalReference::out_of_memory_function(masm->isolate()), 0);
  }

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(rax);

  __ bind(&throw_normal_exception);
  __ Throw(rax);
}
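// JSEntryStub is the trampoline from C++ into JavaScript: it builds an entry
// frame, saves callee-saved (and on Win64 the XMM) registers, installs a
// JS_ENTRY handler and calls the entry trampoline builtin.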
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {  // NOLINT. Scope block confuses linter.
    MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movp(rbp, rsp);

    // Push the stack frame type marker twice.
    int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT
                              : StackFrame::ENTRY;
    // ... (push of the smi-tagged marker and the callee-saved general
    //      registers elided)

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save.
    __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
#endif

    // Set up the roots and smi constant registers.
    // Needs to be done before any further smi loads.
    __ InitializeSmiConstantRegister();
    __ InitializeRootRegister();
  }

  Isolate* isolate = masm->isolate();

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
  {
    Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  // ... (test of the current js_entry_sp elided)
  __ Store(js_entry_sp, rax);
  // ...
  __ bind(&not_outermost_js);
  // ... (push of the inner/outermost frame marker elided)

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: store result (exception) in the pending exception
  // field and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate);
  __ Store(pending_exception, rax);
  // ...
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain. There's only one
  // handler block in this code object, so its index is 0.
  __ bind(&invoke);
  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  // Clear any pending exceptions.
  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
  __ Store(pending_exception, rax);

  // Fake a receiver (NULL).
  __ Push(Immediate(0));  // receiver

  // Invoke the function by calling through the JS entry trampoline builtin
  // and pop the faked function when we return.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate);
    __ Load(rax, construct_entry);
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
    __ Load(rax, entry);
  }
  // ... (call through the trampoline code object elided)

  // Unlink this frame from the handler chain.
  __ PopTryHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  // ...
  __ j(not_equal, &not_outermost_js_2);
  // ... (clear js_entry_sp elided)
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save.
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif
  // ... (pop of general callee-saved registers elided)

  // Remove the two frame-type marker slots, restore rbp, and return.
  __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers
  __ popq(rbp);
  __ ret(0);
}
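// InstanceofStub walks the prototype chain. Without a call-site cache it
// consults the global instanceof cache roots; with one it patches the result
// directly into the caller's inlined check sequence.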
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Implements "value instanceof function" operator.
  // Returns a bitwise zero to indicate that the value is an instance of the
  // function, and anything else to indicate that it is not.

  static const int kOffsetToMapCheckValue = 2;
  static const int kOffsetToResultValue = 18;
  // The integer part of the inline check is calculated from the offsets of
  // the instructions generated into the call site.
  static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
  static const unsigned int kWordBeforeResultValue = 0x458B4906;

  int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;

  Label slow;
  StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, args.GetArgumentOperand(0));
  __ JumpIfSmi(rax, &slow);

  // Check that the left hand side is a JS object. Leave its map in rax.
  // ...

  // Get the prototype of the function.
  __ movp(rdx, args.GetArgumentOperand(1));

  // If there is a call site cache, don't look in the global cache, but do
  // the real lookup and update the call site cache.
  if (!HasCallSiteInlineCheck()) {
    // Look up the function and the map in the instanceof cache.
    Label miss;
    __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
    __ j(not_equal, &miss, Label::kNear);
    __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
    __ j(not_equal, &miss, Label::kNear);
    __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
    __ ret(2 * kPointerSize);
    __ bind(&miss);
  }

  __ TryGetFunctionPrototype(rdx, rbx, &slow, true);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(rbx, &slow);
  // ...

  // Register mapping:
  //   rax is object map, rdx is function, rbx is function prototype.
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
    __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
  } else {
    // Get return address and delta to inlined map check.
    // ...
    if (FLAG_debug_code) {
      __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
    }
    // ...
  }

  // Loop through the prototype chain looking for the function prototype.
  Label loop, is_instance, is_not_instance;
  // ...
  __ bind(&loop);
  __ j(equal, &is_instance, Label::kNear);
  // The code at is_not_instance assumes that kScratchRegister contains a
  // non-zero GCable value (the null value in this case).
  __ j(equal, &is_not_instance, Label::kNear);
  // ... (advance to the next prototype and jump back to &loop elided)

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    // Store bitwise zero in the cache. This is the canonical true value.
    __ xorl(rax, rax);
    __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Store offset of true in the root array at the inline check site.
    int true_offset = 0x100 +
        (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
    // Assert it is a 1-byte signed value.
    ASSERT(true_offset >= 0 && true_offset < 0x100);
    __ movl(rax, Immediate(true_offset));
    // ... (patch the call site elided)
    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
    __ Set(rax, 0);
  }
  __ ret((2 + extra_argument_offset) * kPointerSize);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    // We have to store a non-zero value in the cache.
    __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Store offset of false in the root array at the inline check site.
    int false_offset = 0x100 +
        (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
    ASSERT(false_offset >= 0 && false_offset < 0x100);
    __ movl(rax, Immediate(false_offset));
    // ... (patch the call site elided)
    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
  }
  __ ret((2 + extra_argument_offset) * kPointerSize);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  if (HasCallSiteInlineCheck()) {
    // Remove extra value from the stack.
    __ PopReturnAddressTo(rcx);
    __ Pop(rax);
    __ PushReturnAddressFrom(rcx);
  }
  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
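// Fast-path generators for String.prototype.charCodeAt/charAt: the slow
// cases call into the runtime for non-smi indices or non-flat strings.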
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;
  Label sliced_string;

  // If the receiver is a smi trigger the non-string case.
  __ JumpIfSmi(object_, receiver_not_string_);
  // ... (string type check elided)

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  // ...
  __ SmiToInteger32(index_, index_);

  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  // ... (push receiver and index elided)
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  // ... (pop receiver and reload instance type elided)
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string, but the index
  // is not a smi, or the index is out of range.
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  // ... (range check against kMaxOneByteCharCode elided)

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  // ... (indexed load from the cache elided)
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
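// Low-level string helpers: bulk character copies via rep movs and the
// incremental string hash used when building flat strings.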
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords; copy remaining bytes
  // afterwards.
  Label done, last_bytes;
  // Nothing to do for zero characters.
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (!ascii) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  __ testl(count, Immediate(~(kPointerSize - 1)));
  __ j(zero, &last_bytes, Label::kNear);
  // ... (rep movs of the pointer-sized chunks elided)
  __ andp(count, Immediate(kPointerSize - 1));

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);
  // ... (byte copy loop elided)
  __ bind(&done);
}


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = (seed + character) + ((seed + character) << 10);
  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
  __ SmiToInteger32(scratch, scratch);
  __ addl(scratch, character);
  __ movl(hash, scratch);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;  (hash * 9 via lea)
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);
  // ... (mask to String::kHashBitMask elided)

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ Set(hash, StringHasher::kZeroHash);
  __ bind(&hash_not_zero);
}
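// SubStringStub extracts [from, to) from a string, returning the original
// string when possible, a sliced string for long substrings, and a fresh
// sequential copy otherwise.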
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry:
  //  rsp[0]  : return address
  //  rsp[8]  : to
  //  rsp[16] : from
  //  rsp[24] : string
  enum SubStringStubArgumentIndices {
    STRING_ARGUMENT_INDEX,
    FROM_ARGUMENT_INDEX,
    TO_ARGUMENT_INDEX,
    SUB_STRING_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);

  // Make sure first argument is a string.
  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
  // ... (smi and string type checks elided)

  // rax: string, rbx: instance type.
  // Calculate length of sub string using the smi values.
  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  // ... (length computation and comparison with the original length elided)
  Label not_original_string;
  __ j(below, &not_original_string, Label::kNear);
  // Return the original string if the substring covers it entirely.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  // ... (single-character shortcut check elided)

  // Deal with different string types: update the index if necessary and
  // put the underlying string into rdi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  __ j(zero, &seq_or_external_string, Label::kNear);
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string. Check whether it is flat, then fetch the first part.
  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  // ...
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string. Fetch parent and correct the start index by offset.
  // ...
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string: use the string itself.
  // ...
  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // Short substrings are copied rather than sliced.
    // ...
    __ j(less, &copy_routine);
    // Allocate a new sliced string. The encoding check relies on the info
    // provided by the original string; the parent's encoding is rechecked
    // on use anyway because of externalized strings.
    Label two_byte_slice, set_slice_header;
    // ...
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
    __ bind(&set_slice_header);
    // ... (write length, hash, parent and offset fields elided)
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // rdi: underlying subject string
  // rbx: instance type of underlying subject string
  // rdx: adjusted start index (smi)
  // rcx: length
  Label two_byte_sequential, sequential_string;
  __ j(zero, &sequential_string);

  // External string: rule out short external strings and load the resource.
  // ...

  __ bind(&sequential_string);
  __ j(zero, &two_byte_sequential);

  // Allocate the result and copy (one-byte case).
  // ...
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
    __ leap(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // ... (locate destination and copy elided)
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  __ bind(&two_byte_sequential);
  // Allocate the result and copy (two-byte case).
  // ...
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
    __ leap(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  }
  // ... (locate destination and copy elided)
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);

  __ bind(&single_char);
  StringCharAtGenerator generator(
      rax, rdx, rcx, rax, &runtime, &runtime, &runtime,
      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}
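// Flat ASCII comparison helpers shared by StringCompareStub and the string
// CompareIC below.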
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4, scratch4, FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one: get the right string's
  // length by subtracting the (non-negative) difference from left's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. The loop then ends when the index reaches zero, which doesn't
  // need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1,
                       SeqOneByteString::kHeaderSize));
  __ negp(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry:
  //  rsp[0]  : return address
  //  rsp[8]  : right string
  //  rsp[16] : left string
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(0));  // left
  __ movp(rax, args.GetArgumentOperand(1));  // right

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ASCII strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, Immediate(2 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
}
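// ArrayPushStub is the fast path for Array.prototype.push on fast-elements
// arrays, including in-place growth of the backing store when it abuts the
// new-space allocation top.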
void ArrayPushStub::Generate(MacroAssembler* masm) {
  int argc = arguments_count();

  StackArgumentsAccessor args(rsp, argc);
  if (argc == 0) {
    // Noop, return the length.
    // ...
    __ ret((argc + 1) * kPointerSize);
    return;
  }

  Isolate* isolate = masm->isolate();

  if (argc != 1) {
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
    return;
  }

  Label call_builtin, attempt_to_grow_elements, with_write_barrier;

  // ... (load of the elements array elided)
  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
    // Check that the elements are in fast mode and writable.
    __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
           isolate->factory()->fixed_array_map());
    __ j(not_equal, &call_builtin);
  }

  // Get the array's length and calculate the new length.
  __ addl(rax, Immediate(argc));
  // ... (load of the elements' capacity elided)

  // Check if we could survive without allocation.
  if (IsFastSmiOrObjectElementsKind(elements_kind())) {
    __ j(greater, &attempt_to_grow_elements);

    // Check if value is a smi.
    __ movp(rcx, args.GetArgumentOperand(1));
    __ JumpIfNotSmi(rcx, &with_write_barrier);

    // ... (plain store of the smi element elided)
  } else {
    __ j(greater, &call_builtin);
    __ movp(rcx, args.GetArgumentOperand(1));
    __ StoreNumberToDoubleElements(
        rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);
  }

  // Save the new length and return.
  // ...
  __ ret((argc + 1) * kPointerSize);

  if (IsFastDoubleElementsKind(elements_kind())) {
    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
    return;
  }

  __ bind(&with_write_barrier);

  if (IsFastSmiElementsKind(elements_kind())) {
    if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);

    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate->factory()->heap_number_map());
    __ j(equal, &call_builtin);

    // Verify that the object can be transitioned in place.
    const int origin_offset = header_size + elements_kind() * kPointerSize;
    // ...
    const int target_offset = header_size + target_kind * kPointerSize;
    // ... (map transition elided)
  }

  // ... (store the element with write barrier and update length elided)
  __ ret((argc + 1) * kPointerSize);

  __ bind(&attempt_to_grow_elements);
  if (!FLAG_inline_new) {
    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
    return;
  }

  __ movp(rbx, args.GetArgumentOperand(1));
  // Growing elements that are SMI-only requires special handling in case
  // the new element is non-Smi. For now, delegate to the builtin.
  Label no_fast_elements_check;
  __ JumpIfSmi(rbx, &no_fast_elements_check);
  __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
  __ bind(&no_fast_elements_check);

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate);
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate);

  const int kAllocationDelta = 4;
  ASSERT(kAllocationDelta >= argc);
  // ... (check that the elements array ends at the allocation top elided)

  // Check if it's safe to allocate more.
  __ addp(rcx, Immediate(kAllocationDelta * kPointerSize));
  Operand limit_operand = masm->ExternalOperand(new_space_allocation_limit);
  __ cmpp(rcx, limit_operand);
  __ j(above, &call_builtin);

  // We fit and could grow elements.
  __ Store(new_space_allocation_top, rcx);

  // Push the argument and fill the rest with holes.
  for (int i = 1; i < kAllocationDelta; i++) {
    // ... (store the hole at slot i elided)
  }

  // ... (update the elements' and array's length elided)
  __ movp(rdx, args.GetReceiverOperand());
  // ...
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
}
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // The allocation site is expected in rcx; at code-generation time only
  // the undefined sentinel is available, and the real AllocationSite is
  // patched in later.
  Isolate* isolate = masm->isolate();
  // ...
  __ Move(rcx, handle(isolate->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    // ...
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(state_);
  __ TailCallStub(&stub);
}
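// The CompareIC stubs: each Generate* handles one recorded comparison state
// (smis, numbers, internalized strings, unique names, strings, objects) and
// misses to GenerateMiss when its assumptions fail.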
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    // ... (ordered comparison with overflow fix-up elided)
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left_ == CompareIC::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right_ == CompareIC::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operands into xmm0 and xmm1.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  // ...
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  // ... (smi-to-double conversion elided)

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  // ...
  __ bind(&left_smi);
  // ... (smi-to-double conversion elided)

  __ bind(&done);
  // Compare operands, not basing the result on EFLAGS when a NaN is
  // involved.
  // ...

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  // ... (setcc/sbb sequence elided)
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
                     CompareIC::GENERIC);
  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ Cmp(rax, masm->isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  // ... (instance type checks elided)

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero; at this point input operands are guaranteed
  // to be non-zero.
  ASSERT(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::UNIQUE_NAME);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  ASSERT(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op_);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance types
  // loaded in tmp1 and tmp2.
  // ...
  __ movp(tmp3, tmp1);
  // ... (combined string-type check via tmp3 elided)

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're
  // done because we already know they are not identical.
  if (equality) {
    Label do_compare;
    // ... (internalized check elided)
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  if (equality) {
    StringCompareStub::GenerateFlatAsciiStringEquals(
        masm, left, right, tmp1, tmp2);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECT);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  // ... (JS_OBJECT_TYPE checks elided)
  ASSERT(GetCondition() == equal);
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  // ... (load both maps elided)
  __ Cmp(rcx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ Cmp(rbx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    ExternalReference miss =
        ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());

    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op_));
    __ CallExternalReference(miss, 3);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
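// NameDictionaryLookupStub probes a name dictionary: a few probes are
// emitted inline by the helpers, with the full probe loop in the stub.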
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  ASSERT(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and the kProbes-th slot is not used (its name is
  // the undefined value), the hash table doesn't contain the property.
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    // ...
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  // ... (result check elided)
  __ jmp(done);
}


void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));
  ASSERT(!name.is(r0));
  ASSERT(!name.is(r1));

  __ AssertName(name);

  // ... (capacity load elided)
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    // ...
    __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    // ...

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    // ...
  }
  // ... (out-of-line stub call elided)
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false, so we
  // cannot call anything that could cause a GC from here.
  // Stack frame on entry:
  //  rsp[0]  : return address.
  //  rsp[8]  : key's hash.
  //  rsp[16] : key.
  // Returns result_ zero if lookup failed, non-zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));  // Hash.
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary_,
                             index_,
                             times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));  // Key.
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // A non-unique-name key during negative lookup might equal the key we
      // are looking for; bail out.
      // ... (check elided, jumping to &maybe_in_dictionary)
    }
  }

  __ bind(&maybe_in_dictionary);
  // For a negative lookup, probing failure is treated as success; for a
  // positive lookup it is a failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
  stub1.GetCode(isolate);
  StoreBufferOverflowStub stub2(kSaveFPRegs);
  stub2.GetCode(isolate);
}
bool CodeStub::CanUseFPRegisters() {
  return true;  // Always have SSE2 on x64.
}
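// RecordWriteStub is the write barrier. Its first two instructions are
// patched between no-ops and jumps to activate the incremental-marking
// paths (see the INCREMENTAL / INCREMENTAL_COMPACTION modes below).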
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // ... (patchable nop bytes elided)
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(masm->isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(
          masm->isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  // Decrement the page's write barrier counter; below zero means we need
  // to inform the incremental marker.
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // If the object is not black we don't have to inform the marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),
                     regs_.scratch1(),
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);
  // Fall through when we need to inform the incremental marker.
}
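// StoreArrayLiteralElementStub stores a value into an array literal's
// backing store, dispatching on the literal's elements kind.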
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // rax    : element value to store
  // rcx    : element index as smi
  // rsp[0] : return address
  // rsp[8] : array literal index in function
  // rsp[16]: array literal
  // clobbers rbx, rdx, rdi

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(1));
  __ movp(rbx, args.GetArgumentOperand(0));
  // ... (map load elided)

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Store into the array literal requires an elements transition. Call into
  // the runtime.
  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  // ... (push literal, index and value elided)
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is object.
  __ bind(&fast_elements);
  // ... (store with write barrier elided)

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
  __ bind(&smi_element);
  // ... (plain store elided)

  __ bind(&double_elements);
  // ...
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 rcx,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  // ... (drop the stack parameters and return to the IC miss stub elided)
}
4693   if (masm->isolate()->function_entry_hook() != NULL) {
         ProfileEntryHookStub stub;
4695     masm->CallStub(&stub);
       }
4700 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4703 const size_t kNumSavedRegisters = 2;
4722 AllowExternalCallThatCantCauseGC scope(masm);
4724 const int kArgumentCount = 2;
4725 __ PrepareCallCFunction(kArgumentCount);
4726   __ CallCFunction(rax, kArgumentCount);
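
// The two C arguments prepared above are the entered function's address and
// the stack location of its return address, matching the embedder-facing
// hook type declared in v8.h in this era:
//
//   typedef void (*FunctionEntryHook)(uintptr_t function,
//                                     uintptr_t return_addr_location);
//
// A minimal hook an embedder might install (via V8::SetFunctionEntryHook in
// this API era) before initializing the VM:

#include <stdint.h>

static void MyEntryHook(uintptr_t function, uintptr_t return_addr_location) {
  // Record |function| in a profiling buffer; the hook must not reenter V8.
  (void)function;
  (void)return_addr_location;
}
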
     template<class T>
4738 static void CreateArrayDispatch(MacroAssembler* masm,
                                     AllocationSiteOverrideMode mode) {
       if (mode == DISABLE_ALLOCATION_SITES) {
         T stub(GetInitialFastElementsKind(), mode);
4742     __ TailCallStub(&stub);
       } else if (mode == DONT_OVERRIDE) {
4746     for (int i = 0; i <= last_index; ++i) {
           ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4749       __ cmpl(rdx, Immediate(kind));
           T stub(kind);
4752       __ TailCallStub(&stub);
         }
4757     __ Abort(kUnexpectedElementsKindInArrayConstructor);
       }
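
// CreateArrayDispatch unrolls a loop over the fast ElementsKind sequence
// into cmpl / j(not_equal) / TailCallStub triples. The shape it compiles
// away, sketched with a stand-in Stub type (illustrative, not one of the
// stub classes above):

#include <cstdlib>

// Models the emitted chain: the first matching kind tail-calls its stub;
// falling off the end aborts, as the __ Abort above does.
template <typename Stub>
void DispatchOnKind(int kind_in_rdx, int last_index) {
  for (int i = 0; i <= last_index; ++i) {
    if (kind_in_rdx == i) {  // i stands in for
                             // GetFastElementsKindFromSequenceIndex(i)
      Stub stub(i);
      stub.TailCall();       // models __ TailCallStub(&stub)
      return;
    }
  }
  std::abort();              // kUnexpectedElementsKindInArrayConstructor
}
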
4764 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                                AllocationSiteOverrideMode mode) {
4772   Handle<Object> undefined_sentinel(
4773       masm->isolate()->heap()->undefined_value(),
           masm->isolate());
4776   Label normal_sequence;
       if (mode == DONT_OVERRIDE) {
         // Holey kinds have the low bit set; nothing to fix up in that case.
4786     __ testb(rdx, Immediate(1));
4787     __ j(not_zero, &normal_sequence);
       }
       StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4792   __ movp(rcx, args.GetArgumentOperand(0));
4794   __ j(zero, &normal_sequence);
       if (mode == DISABLE_ALLOCATION_SITES) {
         ElementsKind initial = GetInitialFastElementsKind();
         ElementsKind holey_initial = GetHoleyElementsKind(initial);
4800     ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                       DISABLE_ALLOCATION_SITES);
4802     __ TailCallStub(&stub_holey);
4804     __ bind(&normal_sequence);
4805     ArraySingleArgumentConstructorStub stub(initial,
                                                 DISABLE_ALLOCATION_SITES);
4807     __ TailCallStub(&stub);
       } else if (mode == DONT_OVERRIDE) {
4813     if (FLAG_debug_code) {
4814       Handle<Map> allocation_site_map =
4815           masm->isolate()->factory()->allocation_site_map();
4817       __ Assert(equal, kExpectedAllocationSite);
         }
4827     __ bind(&normal_sequence);
         int last_index = GetSequenceIndexFromFastElementsKind(
             TERMINAL_FAST_ELEMENTS_KIND);
4830     for (int i = 0; i <= last_index; ++i) {
           ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4833       __ cmpl(rdx, Immediate(kind));
4835       ArraySingleArgumentConstructorStub stub(kind);
4836       __ TailCallStub(&stub);
         }
4841     __ Abort(kUnexpectedElementsKindInArrayConstructor);
       }
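
// The testb(rdx, Immediate(1)) above leans on the ElementsKind numbering:
// each holey fast kind is its packed counterpart plus one, so the low bit
// doubles as an "is holey" flag, and kFastElementsKindPackedToHoley names
// that distance. A sketch of both facts with abbreviated enum values:

enum FastKindSketch {   // packed/holey pairs adjacent, as in the runtime
  PACKED_SMI = 0, HOLEY_SMI = 1,
  PACKED = 2, HOLEY = 3
};
const int kPackedToHoley = 1;  // mirrors kFastElementsKindPackedToHoley

inline bool IsHoleyKind(FastKindSketch k) { return (k & 1) != 0; }
inline FastKindSketch ToHoleyKind(FastKindSketch k) {
  return static_cast<FastKindSketch>(k | kPackedToHoley);
}
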
4849 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4852   for (int i = 0; i <= to_index; ++i) {
4855     stub.GetCode(isolate);
4858     stub1.GetCode(isolate);
4865   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
           isolate);
4867   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
           isolate);
4869   ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
           isolate);
       ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4877   for (int i = 0; i < 2; i++) {
4879     InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
4880     stubh1.GetCode(isolate);
4881     InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
4882     stubh2.GetCode(isolate);
4883     InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
4884     stubh3.GetCode(isolate);
       }
4889 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4890     MacroAssembler* masm,
         AllocationSiteOverrideMode mode) {
4892   if (argument_count_ == ANY) {
4893     Label not_zero_case, not_one_case;
4895     __ j(not_zero, &not_zero_case);
4896     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4898     __ bind(&not_zero_case);
4899     __ cmpl(rax, Immediate(1));
4901     CreateArrayDispatchOneArgument(masm, mode);
4903     __ bind(&not_one_case);
4904     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4905   } else if (argument_count_ == NONE) {
4906     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4907   } else if (argument_count_ == ONE) {
4908     CreateArrayDispatchOneArgument(masm, mode);
       } else if (argument_count_ == MORE_THAN_ONE) {
4910     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
       }
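
// When the call site's arity is ANY, the stub dispatches on rax at runtime;
// the NONE/ONE/MORE_THAN_ONE cases instead bake the choice in at code
// generation time. The runtime split, restated with illustrative names:

enum ArgcBucket { kNoArgs, kOneArg, kNArgs };

inline ArgcBucket ClassifyArgc(int argc) {
  if (argc == 0) return kNoArgs;  // the j(not_zero, &not_zero_case) path
  if (argc == 1) return kOneArg;  // the cmpl(rax, Immediate(1)) path
  return kNArgs;
}
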
4925   if (FLAG_debug_code) {
         Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
4934     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
4936     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
4939     __ AssertUndefinedOrAllocationSite(rbx);
       }
4945   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
4960 void InternalArrayConstructorStub::GenerateCase(
         MacroAssembler* masm, ElementsKind kind) {
4962   Label not_zero_case, not_one_case;
4963   Label normal_sequence;
4966   __ j(not_zero, &not_zero_case);
4967   InternalArrayNoArgumentConstructorStub stub0(kind);
4968   __ TailCallStub(&stub0);
4970   __ bind(&not_zero_case);
4971   __ cmpl(rax, Immediate(1));
       if (IsFastPackedElementsKind(kind)) {
         StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4978     __ movp(rcx, args.GetArgumentOperand(0));
4980     __ j(zero, &normal_sequence);
4982     InternalArraySingleArgumentConstructorStub
             stub1_holey(GetHoleyElementsKind(kind));
4984     __ TailCallStub(&stub1_holey);
       }
4987   __ bind(&normal_sequence);
4988   InternalArraySingleArgumentConstructorStub stub1(kind);
4989   __ TailCallStub(&stub1);
4991   __ bind(&not_one_case);
4992   InternalArrayNArgumentsConstructorStub stubN(kind);
4993   __ TailCallStub(&stubN);
5005   if (FLAG_debug_code) {
         Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
5014     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
5016     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
       }
5029   if (FLAG_debug_code) {
         __ Assert(equal,
5035               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
       }
5039   Label fast_elements_case;
5041   __ j(equal, &fast_elements_case);
5044   __ bind(&fast_elements_case);
5049 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5064   Register callee = rax;
5065   Register call_data = rbx;
5066   Register holder = rcx;
5067   Register api_function_address = rdx;
5068   Register return_address = rdi;
5069   Register context = rsi;
5071   int argc = ArgumentBits::decode(bit_field_);
5072   bool is_store = IsStoreBits::decode(bit_field_);
5073   bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5075   typedef FunctionCallbackArguments FCA;
5086   __ PopReturnAddressTo(return_address);
5098   Register scratch = call_data;
5099   if (!call_data_undefined) {
5100     __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
       }
       __ Move(scratch,
5108           ExternalReference::isolate_address(masm->isolate()));
5113   __ movp(scratch, rsp);
5115   __ PushReturnAddressFrom(return_address);
5119   const int kApiStackSpace = 4;
5121   __ PrepareCallApiFunction(kApiStackSpace);
5125   __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5131 #if defined(__MINGW64__) || defined(_WIN64)
5132   Register arguments_arg = rcx;
5133   Register callback_arg = rdx;
     #else
5135   Register arguments_arg = rdi;
5136   Register callback_arg = rsi;
     #endif
       Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
5141   ASSERT(!api_function_address.is(arguments_arg));
5149   StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                            ARGUMENTS_DONT_CONTAIN_RECEIVER);
5151   Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
5152       FCA::kArgsLength - FCA::kContextSaveIndex);
5154   Operand return_value_operand = args_from_rbp.GetArgumentOperand(
5155       is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
5156   __ CallApiFunctionAndReturn(
5157       api_function_address,
           thunk_address,
           callback_arg,
5160       argc + FCA::kArgsLength + 1,
5161       return_value_operand,
5162       &context_restore_operand);
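
// The pushes in this stub build a FunctionCallbackArguments (FCA) block on
// the stack beneath the API arguments; the FCA::k*Index constants used above
// name slots in that block. A struct view of the layout assumed here (field
// names are descriptive, and the exact indices are era-specific, so treat
// them as assumptions rather than the runtime's declaration):

struct FcaBlockSketch {      // slot 0 is the lowest address
  void* holder;              // FCA::kHolderIndex
  void* isolate;             // FCA::kIsolateIndex
  void* return_value_default;
  void* return_value;        // FCA::kReturnValueOffset; for store callbacks
                             // the stored value itself is the result
  void* data;                // embedder call data (undefined if absent)
  void* callee;
  void* context_save;        // FCA::kContextSaveIndex, restored after return
};
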
5166 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5175 #if defined(__MINGW64__) || defined(_WIN64)
5176   Register getter_arg = r8;
5177   Register accessor_info_arg = rdx;
5178   Register name_arg = rcx;
     #else
5180   Register getter_arg = rdx;
5181   Register accessor_info_arg = rsi;
5182   Register name_arg = rdi;
     #endif
5184   Register api_function_address = r8;
5185   Register scratch = rax;
       const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
5191   const int kArgStackSpace = 1;
5195   __ PrepareCallApiFunction(kArgStackSpace);
5196   __ leap(scratch, Operand(name_arg, 1 * kPointerSize));
       Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5209   ASSERT(!api_function_address.is(accessor_info_arg) &&
5210          !api_function_address.is(name_arg));
       StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
5214   Operand return_value_operand = args.GetArgumentOperand(
           PropertyCallbackArguments::kArgsLength - 1 -
           PropertyCallbackArguments::kReturnValueOffset);
5217   __ CallApiFunctionAndReturn(api_function_address,
                                   thunk_address,
                                   getter_arg,
                                   kStackSpace,
5221                               return_value_operand,
                                   NULL);
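
// The function address called here lands in an embedder accessor with the
// v8.h getter shape; name_arg carries the property name and
// accessor_info_arg the PropertyCallbackInfo, matching the two C arguments
// set up above. A minimal getter an embedder might register (e.g. through
// ObjectTemplate::SetAccessor):

#include <v8.h>

static void MyGetter(v8::Local<v8::String> property,
                     const v8::PropertyCallbackInfo<v8::Value>& info) {
  (void)property;
  info.GetReturnValue().Set(42);
}
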
5230 #endif // V8_TARGET_ARCH_X64