#if V8_TARGET_ARCH_IA32
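
// The fragments below are InitializeInterfaceDescriptor() definitions for
// the hydrogen-based code stubs. Each records, in a
// CodeStubInterfaceDescriptor, which ia32 registers carry the stub's
// parameters and which handler to call when the stub deoptimizes or
// misses; a NULL deoptimization_handler_ means the stub never bails out
// through that path.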
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
void FastNewContextStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edi };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}
    CodeStubInterfaceDescriptor* descriptor) {
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx, edx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, ebx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
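
// The (Internal)Array constructor descriptors below come in three flavors,
// selected by constant_stack_parameter_count: 0 arguments, 1 argument, and
// -1 for the N-argument case, where the actual argument count is passed on
// the stack and tracked in eax via stack_parameter_count_.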
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { edi, ebx, eax };
  static Register registers_no_args[] = { edi, ebx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { edi, eax };
  static Register registers_no_args[] = { edi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}
void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, edx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
void StringAddStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { edi,
  static Representation representations[] = {
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { esi,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { esi,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { esi,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { eax,
  static Representation representations[] = {
  descriptor->register_param_count_ = 5;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
#define __ ACCESS_MASM(masm)
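
// A "lightweight miss" pushes the stub's register parameters in descriptor
// order and calls the registered miss handler in the runtime. The ASSERT in
// the body documents the invariant that eax, if it is used at all, is the
// last register parameter, since eax is also the return-value register.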
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  int param_count = descriptor->register_param_count_;
  ASSERT(descriptor->register_param_count_ == 0 ||
         eax.is(descriptor->register_params_[param_count - 1]));
  for (int i = 0; i < param_count; ++i) {
    __ push(descriptor->register_params_[i]);
  }
  ExternalReference miss = descriptor->miss_handler();
  __ CallExternalReference(miss, descriptor->register_param_count_);
  CpuFeatureScope scope(masm, SSE2);

  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
         Immediate(ExternalReference::isolate_address(masm->isolate())));
      ExternalReference::store_buffer_overflow_function(masm->isolate()),

  CpuFeatureScope scope(masm, SSE2);
class FloatingPointHelper : public AllStatic {
  static void LoadFloatOperand(MacroAssembler* masm, Register number);
  static void CheckFloatOperands(MacroAssembler* masm,
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
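
// The fragment below is from DoubleToIStub::Generate: it truncates a
// double to a 32-bit integer. When SSE3 is available the fast path uses
// fisttp (a truncating store); otherwise the mantissa and exponent words
// are picked apart manually with shrd/shr, and cmov selects the sign when
// the CPU supports it.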
  Register input_reg = this->source();
  Label check_negative, process_64_bits, done, done_no_stash;
  int double_offset = offset();

  Register scratch_candidates[3] = { ebx, edx, edi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
    CpuFeatureScope scope(masm, SSE3);
    __ fld_d(mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
    CpuFeatureScope scope(masm, SSE3);
  if (stash_exponent_copy) {
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));
    __ jmp(&done_no_stash);

  __ sub(ecx, Immediate(delta));
  if (stash_exponent_copy) {
    __ mov(result_reg, exponent_operand);
  __ shrd(result_reg, scratch1);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
    CpuFeatureScope use_cmov(masm, CMOV);
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);

  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  if (stash_exponent_copy) {
    __ cmp(exponent_operand, Immediate(0));
    CpuFeatureScope use_cmov(masm, CMOV);
  __ mov(result_reg, scratch1);

  if (stash_exponent_copy) {
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    __ mov(final_result_reg, result_reg);
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ jmp(&done, Label::kNear);

  __ fild_s(Operand(esp, 0));
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ j(equal, &load_float_eax, Label::kNear);

  __ bind(&load_smi_edx);
  __ bind(&load_smi_eax);
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
  Label test_other, done;
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ cmp(scratch, factory->heap_number_map());
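
// MathPowStub::Generate follows. Base and exponent arrive in edx and eax,
// with xmm1-xmm4 holding the double-precision working values. The stub
// special-cases integer exponents (repeated squaring in the int_exponent
// loop), exponents of +-0.5 (reduced to sqrt/rsqrt, with care for a base
// of -Infinity, which is 0xFF800000 in single precision), and otherwise
// falls back to an x87 fast path or Runtime::kMath_pow_cfunction.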
  CpuFeatureScope use_sse2(masm, SSE2);
  Factory* factory = masm->isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;
  Label call_runtime, done, exponent_not_smi, int_exponent;

  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

    Label base_is_smi, unpack_exponent;
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
           factory->heap_number_map());
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
           factory->heap_number_map());
    __ movsd(double_exponent,
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    Label continue_sqrt, continue_rsqrt, not_plus_half;

    __ mov(scratch, Immediate(0x3F000000u));
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_scratch, double_exponent);

    __ mov(scratch, 0xFF800000u);
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_base, double_scratch);
    __ j(carry, &continue_sqrt, Label::kNear);

    __ xorps(double_result, double_result);
    __ subsd(double_result, double_scratch);

    __ bind(&continue_sqrt);
    __ xorps(double_scratch, double_scratch);
    __ addsd(double_scratch, double_base);
    __ sqrtsd(double_result, double_scratch);

    __ bind(&not_plus_half);
    __ subsd(double_scratch, double_result);
    __ ucomisd(double_scratch, double_exponent);

    __ mov(scratch, 0xFF800000u);
    __ movd(double_scratch, scratch);
    __ cvtss2sd(double_scratch, double_scratch);
    __ ucomisd(double_base, double_scratch);
    __ j(carry, &continue_rsqrt, Label::kNear);

    __ xorps(double_result, double_result);

    __ bind(&continue_rsqrt);
    __ xorps(double_exponent, double_exponent);
    __ addsd(double_exponent, double_base);
    __ sqrtsd(double_exponent, double_exponent);
    __ divsd(double_result, double_exponent);
    Label fast_power_failed;
    __ bind(&fast_power);
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));

    __ test_b(eax, 0x5F);
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));

    __ bind(&fast_power_failed);
    __ jmp(&call_runtime);
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);
  __ movsd(double_scratch, double_base);
  __ movsd(double_scratch2, double_result);

  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(zero, &while_false, Label::kNear);
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);

  __ bind(&while_false);
  __ test(exponent, exponent);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  __ Cvtsi2sd(double_exponent, exponent);
  Counters* counters = masm->isolate()->counters();

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

  __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
  __ IncrementCounter(counters->math_pow(), 1);

  __ bind(&call_runtime);
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
        ExternalReference::power_double_double_function(masm->isolate()), 4);

  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));

  __ IncrementCounter(counters->math_pow(), 1);
  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));

  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);

  StubCompiler::TailCallBuiltin(
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  __ JumpIfNotSmi(edx, &slow, Label::kNear);
  __ j(equal, &adaptor, Label::kNear);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}
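
// GenerateNewSloppyFast builds a sloppy-mode arguments object in generated
// code. When some formals are aliased ("mapped" parameters), the elements
// store is prefixed with a parameter map pointing back into the context,
// which is why the code below walks two loops: one for the mapped
// parameters and one for the remaining unmapped arguments.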
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  Label adaptor_frame, try_allocate;
  __ j(equal, &adaptor_frame, Label::kNear);
  __ jmp(&try_allocate, Label::kNear);

  __ bind(&adaptor_frame);

  __ bind(&try_allocate);

  const int kParameterMapHeaderSize =
  Label no_parameter_map;
  __ j(zero, &no_parameter_map, Label::kNear);
  __ bind(&no_parameter_map);

  Label has_mapped_parameters, copy;
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edx, Operand(esp, 4 * kPointerSize));

  Label skip_parameter_map;
  __ j(zero, &skip_parameter_map);
         Immediate(isolate->factory()->sloppy_arguments_elements_map()));

  Label parameters_loop, parameters_test;
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ mov(ecx, isolate->factory()->the_hole_value());
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ bind(&parameters_test);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);
         Immediate(isolate->factory()->fixed_array_map()));

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ bind(&arguments_test);
  __ j(less, &arguments_loop, Label::kNear);

  __ ret(3 * kPointerSize);

  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  Label adaptor_frame, try_allocate, runtime;
  __ j(equal, &adaptor_frame, Label::kNear);

  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  __ bind(&adaptor_frame);
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  Label add_arguments_object;
  __ bind(&try_allocate);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ bind(&add_arguments_object);

  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ j(zero, &done, Label::kNear);

  __ mov(edx, Operand(esp, 2 * kPointerSize));
         Immediate(isolate->factory()->fixed_array_map()));

  __ mov(ebx, Operand(edx, -1 * kPointerSize));
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));

  __ ret(3 * kPointerSize);

  __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
}
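
// RegExpExecStub::Generate calls irregexp code directly. Under
// V8_INTERPRETED_REGEXP it simply tail-calls the runtime; otherwise it
// checks the subject string's representation (sequential one-byte,
// sequential two-byte, external, cons/sliced), sets up the nine arguments
// the native regexp entry expects on the stack, runs it, and on success
// copies the captures into the last-match-info array.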
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;

  Factory* factory = masm->isolate()->factory();

  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));

  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ JumpIfSmi(eax, &runtime);

  if (FLAG_debug_code) {
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);

  Label seq_one_byte_string, seq_two_byte_string,
      external_string, check_underlying,
      not_seq_nor_cons, check_code,
  __ j(zero, &seq_two_byte_string);
  __ j(zero, &seq_one_byte_string, Label::kNear);

  __ bind(&check_underlying);
  __ j(zero, &seq_two_byte_string);

  __ bind(&seq_one_byte_string);
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ Move(ecx, Immediate(1));

  __ bind(&check_code);
  __ JumpIfSmi(edx, &runtime);

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(

  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  Label setup_two_byte, setup_rest;
  __ j(zero, &setup_two_byte, Label::kNear);
  __ mov(Operand(esp, 3 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), ecx);
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  __ mov(Operand(esp, 3 * kPointerSize), ecx);
  __ mov(Operand(esp, 2 * kPointerSize), ecx);

  __ bind(&setup_rest);

  __ LeaveApiExitFrame(true);

  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ add(edx, Immediate(2));

  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ cmp(eax, factory->fixed_array_map());

  __ mov(eax, Operand(esp, kSubjectOffset));
  __ RecordWriteField(ebx,
  __ RecordWriteField(ebx,

  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  Label next_capture, done;
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ jmp(&next_capture);

  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);

  __ bind(&not_seq_nor_cons);
  __ j(greater, &not_long_external, Label::kNear);

  __ bind(&external_string);
  if (FLAG_debug_code) {
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }

  __ bind(&seq_two_byte_string);
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ Move(ecx, Immediate(0));
  __ jmp(&check_code);

  __ bind(&not_long_external);
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}
static int NegativeComparisonResult(Condition cc) {


static void CheckInputType(MacroAssembler* masm,
  __ JumpIfNotSmi(input, fail);
  __ JumpIfSmi(input, &ok);
         Immediate(masm->isolate()->factory()->heap_number_map()));
static void BranchIfNotInternalizedString(MacroAssembler* masm,
  __ JumpIfSmi(object, label);
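
// ICCompareStub::GenerateGeneric handles the fully generic comparison:
// smis first, then identical references (with special handling for NaN and
// undefined), heap numbers via SSE2/CMOV or the x87 FPU, internalized
// strings, and finally objects, before deferring to the EQUALS or
// STRICT_EQUALS builtins.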
void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects;

  CheckInputType(masm, edx, left_, &miss);
  CheckInputType(masm, eax, right_, &miss);

  Label non_smi, smi_done;
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);

  Label generic_heap_number_comparison;
  Label not_identical;

  Label check_for_nan;
  __ cmp(edx, masm->isolate()->factory()->undefined_value());
  __ bind(&check_for_nan);
         Immediate(masm->isolate()->factory()->heap_number_map()));
  __ j(equal, &generic_heap_number_comparison, Label::kNear);

  __ bind(&not_identical);

  if (cc == equal && strict()) {
    __ sub(ecx, Immediate(0x01));
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(equal, &slow, Label::kNear);

    Label first_non_object;
    __ j(below, &first_non_object, Label::kNear);

    Label return_not_equal;
    __ bind(&return_not_equal);

    __ bind(&first_non_object);
    __ j(equal, &return_not_equal);
    __ j(equal, &return_not_equal);
  }

  Label non_number_comparison;
  __ bind(&generic_heap_number_comparison);
    CpuFeatureScope use_sse2(masm, SSE2);
    CpuFeatureScope use_cmov(masm, CMOV);

    FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);

  FloatingPointHelper::CheckFloatOperands(
      masm, &non_number_comparison, ebx);
  FloatingPointHelper::LoadFloatOperand(masm, eax);
  FloatingPointHelper::LoadFloatOperand(masm, edx);

  Label below_label, above_label;
  __ j(below, &below_label, Label::kNear);
  __ j(above, &above_label, Label::kNear);
  __ Move(eax, Immediate(0));

  __ bind(&below_label);
  __ bind(&above_label);

  __ bind(&unordered);

  __ bind(&non_number_comparison);

  Label check_for_strings;
  BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
  BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

  __ bind(&check_for_strings);
                                         &check_unequal_objects);

  __ Abort(kUnexpectedFallThroughFromStringComparison);

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    Label not_both_objects;
    Label return_unequal;
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(below, &not_both_objects, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ j(zero, &return_unequal, Label::kNear);
    __ bind(&return_unequal);
    __ bind(&not_both_objects);
  }

  builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
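
// GenerateRecordCallTarget writes type feedback for a call site: the
// feedback slot moves from monomorphic to megamorphic as different targets
// are seen, and calls to the Array function are recorded specially by
// storing an AllocationSite (created via CreateAllocationSiteStub) so that
// array allocations from the site can be pretenured and their elements
// kinds tracked.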
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  __ j(equal, &done, Label::kFar);
  __ j(equal, &done, Label::kFar);

  if (!FLAG_pretenuring_call_new) {
    Handle<Map> allocation_site_map =
        masm->isolate()->factory()->allocation_site_map();
    __ jmp(&done, Label::kFar);

  __ bind(&megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&initialize);
  if (!FLAG_pretenuring_call_new) {
    CreateAllocationSiteStub create_stub;
    __ CallStub(&create_stub);

  __ bind(&not_array_function);
  Isolate* isolate = masm->isolate();
  Label slow, non_function, wrap, cont;

  if (NeedsChecks()) {
    __ JumpIfSmi(edi, &non_function);

    if (RecordCallTarget()) {
      GenerateRecordCallTarget(masm);
      __ mov(ebx, Immediate(isolate->factory()->undefined_value()));

  ParameterCount actual(argc_);

  if (CallAsMethod()) {
    if (NeedsChecks()) {
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));

    if (NeedsChecks()) {
      __ JumpIfSmi(eax, &wrap);

  if (NeedsChecks()) {
    if (RecordCallTarget()) {
    __ Move(eax, Immediate(argc_ + 1));
    __ Move(ebx, Immediate(0));
    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
      Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
      __ jmp(adaptor, RelocInfo::CODE_TARGET);

    __ bind(&non_function);
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
    __ Move(eax, Immediate(argc_));
    __ Move(ebx, Immediate(0));
    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
    __ jmp(adaptor, RelocInfo::CODE_TARGET);

  if (CallAsMethod()) {
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
  Label slow, non_function_call;
  __ JumpIfSmi(edi, &non_function_call);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

    if (FLAG_pretenuring_call_new) {
      Label feedback_register_initialized;
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ j(equal, &feedback_register_initialized);
      __ mov(ebx, masm->isolate()->factory()->undefined_value());
      __ bind(&feedback_register_initialized);

    __ AssertUndefinedOrAllocationSite(ebx);

  Register jmp_reg = ecx;

  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);

  __ Move(ebx, Immediate(0));
  Handle<Code> arguments_adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
bool CEntryStub::NeedsImmovableCode() {


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
    PlatformFeatureScope sse2(SSE2);
    BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  Code* save_doubles_code;
  if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
    save_doubles_code = *(save_doubles.GetCode(isolate));
  isolate->set_fp_stubs_generated(true);

  stub.GetCode(isolate);
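
// CEntryStub::GenerateCore performs the actual call from JS into a C++
// runtime function: it checks stack alignment in debug builds, passes the
// isolate as the last C argument, optionally bumps the always-allocate
// scope depth, and on return distinguishes normal results from failures,
// routing pending exceptions to the throw_normal/throw_termination paths.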
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              bool always_allocate_scope) {

  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  __ mov(Operand(esp, 1 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  __ mov(Operand(esp, 0 * kPointerSize), edi);
  __ mov(Operand(esp, 1 * kPointerSize), esi);
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  if (FLAG_debug_code) {
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
  }

  Label failure_returned;
  __ j(zero, &failure_returned);

  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, masm->isolate());

  if (FLAG_debug_code) {
    __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    __ j(equal, &okay, Label::kNear);
  }

  __ bind(&failure_returned);
  __ j(zero, &retry, Label::kNear);

  __ mov(eax, Operand::StaticVariable(pending_exception_address));

  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  __ cmp(eax, masm->isolate()->factory()->termination_exception());
  __ j(equal, throw_termination_exception);

  __ jmp(throw_normal_exception);
}
  Label throw_normal_exception;
  Label throw_termination_exception;

               &throw_normal_exception,
               &throw_termination_exception,

               &throw_normal_exception,
               &throw_termination_exception,

  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
               &throw_normal_exception,
               &throw_termination_exception,

  { FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(0, eax);
        ExternalReference::out_of_memory_function(masm->isolate()), 0);
  }

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&throw_normal_exception);
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;

  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
  __ mov(Operand::StaticVariable(pending_exception), eax);

  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  __ push(Immediate(0));

  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
  __ mov(edx, Immediate(construct_entry));
  ExternalReference entry(Builtins::kJSEntryTrampoline,
  __ mov(edx, Immediate(entry));

  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress,

  __ add(esp, Immediate(2 * kPointerSize));
  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());

  Register object = eax;
  Register function = edx;
  Register prototype = edi;
  Register scratch = ecx;

  static const int kDeltaToCmpImmediate = 2;
  static const int kDeltaToMov = 8;
  static const int kDeltaToMovImmediate = 9;
  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

  Label slow, not_js_object;
  if (!HasArgsInRegisters()) {
    __ mov(object, Operand(esp, 2 * kPointerSize));
    __ mov(function, Operand(esp, 1 * kPointerSize));
  }

  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  if (!HasCallSiteInlineCheck()) {
    __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
    __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
    __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
  }

  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
    __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  } else {
    ASSERT(HasArgsInRegisters());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
    }
    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
    __ mov(Operand(scratch, 0), map);
  }

  Label loop, is_instance, is_not_instance;
  __ cmp(scratch, prototype);
  __ j(equal, &is_instance, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, Immediate(factory->null_value()));
  __ j(equal, &is_not_instance, Label::kNear);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ mov(eax, Immediate(0));
    __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    __ mov(eax, factory->true_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Move(eax, Immediate(0));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    __ mov(eax, factory->false_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {

  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);

  __ JumpIfSmi(function, &slow, Label::kNear);

  __ cmp(object, factory->null_value());
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null);
  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null_or_smi);
  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  if (!ReturnTrueFalseObject()) {
    if (HasArgsInRegisters()) {

    Label true_value, done;
    __ j(zero, &true_value, Label::kNear);
    __ mov(eax, factory->false_value());
    __ jmp(&done, Label::kNear);
    __ bind(&true_value);
    __ mov(eax, factory->true_value());
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
  __ JumpIfSmi(object_, receiver_not_string_);

  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
      masm, factory, object_, index_, result_, &call_runtime_);


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  __ bind(&index_not_smi_);
              masm->isolate()->factory()->heap_number_map(),
  call_helper.BeforeCall(masm);
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
    __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
  if (!index_.is(eax)) {
    __ mov(index_, eax);
  }
  call_helper.AfterCall(masm);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  __ jmp(&got_smi_index_);

  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
  Factory* factory = masm->isolate()->factory();
  __ Move(result_, Immediate(factory->single_character_string_cache()));
  __ cmp(result_, factory->undefined_value());


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  __ test(count, count);

  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  __ mov(scratch, count);
  __ mov(count, scratch);

  __ bind(&last_bytes);
  __ test(count, count);

  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(src, Immediate(1));
  __ add(dest, Immediate(1));
  __ sub(count, Immediate(1));
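
// The string hash below is the Jenkins one-at-a-time hash, seeded with the
// heap's hash seed. Per character: hash += c; hash += hash << 10;
// hash ^= hash >> 6. GetHash finalizes with hash += hash << 3;
// hash ^= hash >> 11; hash += hash << 15, and maps a zero hash to a
// non-zero value (the hash_not_zero check).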
  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
  __ SmiUntag(scratch);
  __ add(scratch, character);
  __ mov(hash, scratch);
  __ shl(scratch, 10);
  __ add(hash, scratch);

  int32_t seed = masm->isolate()->heap()->HashSeed();
  __ lea(scratch, Operand(character, seed));
  __ shl(scratch, 10);
  __ lea(hash, Operand(scratch, character, times_1, seed));

  __ mov(scratch, hash);
  __ xor_(hash, scratch);

  __ add(hash, character);
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, scratch);
  __ mov(scratch, hash);
  __ xor_(hash, scratch);

  __ mov(scratch, hash);
  __ add(hash, scratch);
  __ mov(scratch, hash);
  __ shr(scratch, 11);
  __ xor_(hash, scratch);
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, scratch);

  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ bind(&hash_not_zero);
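
// SubStringStub::Generate takes (string, from, to) from the stack. It
// returns the original string when the range covers it entirely, otherwise
// unwraps cons/sliced strings to the underlying sequential or external
// string and then either materializes a SlicedString (when
// FLAG_string_slices allows) or copies the characters into a fresh
// sequential string.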
void SubStringStub::Generate(MacroAssembler* masm) {
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ JumpIfSmi(eax, &runtime);

  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ JumpIfNotSmi(ecx, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ JumpIfNotSmi(edx, &runtime);

  Label not_original_string;
  __ j(below, &not_original_string, Label::kNear);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);
  __ bind(&not_original_string);

  Label underlying_unpacked, sliced_string, seq_or_external_string;
  __ j(zero, &seq_or_external_string, Label::kNear);

  Factory* factory = masm->isolate()->factory();
  __ j(not_zero, &sliced_string, Label::kNear);
         factory->empty_string());
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    __ j(less, &copy_routine);

    Label two_byte_slice, set_slice_header;
    __ j(zero, &two_byte_slice, Label::kNear);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ bind(&set_slice_header);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  Label two_byte_sequential, runtime_drop_two, sequential_string;
  __ j(zero, &sequential_string);

  __ bind(&sequential_string);
  __ j(zero, &two_byte_sequential);

  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&two_byte_sequential);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&runtime_drop_two);

  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);

  __ bind(&single_char);
  StringCharAtGenerator generator(
  generator.GenerateFast(masm);
  __ ret(3 * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}
    Register scratch2) {
  Register length = scratch1;

  Label strings_not_equal, check_zero_length;
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);

  Label compare_chars;
  __ bind(&check_zero_length);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);

  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);
    Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  __ mov(scratch3, scratch1);

  Register length_delta = scratch3;
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  Label result_greater;

  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  __ bind(&result_greater);
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  __ SmiUntag(length);

  Register index = length;

  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
void StringCompareStub::Generate(MacroAssembler* masm) {
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  __ add(esp, Immediate(2 * kPointerSize));

  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
}
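
// ArrayPushStub::Generate pushes arguments onto a JSArray in generated
// code when the fast paths apply: it bails to Builtins::c_ArrayPush for
// anything tricky, takes the write-barrier path only when a non-smi is
// stored, and can grow the backing store in place by kAllocationDelta
// slots when the array's elements end exactly at the new-space allocation
// top.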
  int argc = arguments_count();

  __ ret((argc + 1) * kPointerSize);

  Isolate* isolate = masm->isolate();
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);

  Label call_builtin, attempt_to_grow_elements, with_write_barrier;
         isolate->factory()->fixed_array_map());
  __ j(greater, &attempt_to_grow_elements);

  __ mov(ecx, Operand(esp, argc * kPointerSize));
  __ JumpIfNotSmi(ecx, &with_write_barrier);

  __ mov(ecx, Operand(esp, argc * kPointerSize));
  __ StoreNumberToDoubleElements(

  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);

  __ bind(&with_write_barrier);

  if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
         isolate->factory()->heap_number_map());

  const int origin_offset = header_size + elements_kind() * kPointerSize;
  const int target_offset = header_size + target_kind * kPointerSize;

  __ ret((argc + 1) * kPointerSize);

  __ bind(&attempt_to_grow_elements);
  if (!FLAG_inline_new) {
    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
  }

  __ mov(ebx, Operand(esp, argc * kPointerSize));
  __ JumpIfNotSmi(ebx, &call_builtin);

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate);
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate);

  const int kAllocationDelta = 4;
  ASSERT(kAllocationDelta >= argc);
  __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
  __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
  __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
  __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);

  for (int i = 1; i < kAllocationDelta; i++) {
    __ mov(Operand(edx, i * kPointerSize),
           isolate->factory()->the_hole_value());
  }

  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
}
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  __ mov(ecx, handle(isolate->heap()->undefined_value()));

  if (FLAG_debug_code) {
           isolate->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  BinaryOpWithAllocationSiteStub stub(state_);
  __ TailCallStub(&stub);
}
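
// The ICCompareStub variants below each handle one state of the compare
// IC: SMI, NUMBER, INTERNALIZED_STRING, UNIQUE_NAME, STRING, OBJECT and
// KNOWN_OBJECT, with GenerateMiss calling out to IC::kCompareIC_Miss to
// pick (and patch in) the next state.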
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  Label unordered, maybe_undefined1, maybe_undefined2;

  __ JumpIfNotSmi(edx, &miss);
  __ JumpIfNotSmi(eax, &miss);

  CpuFeatureScope scope1(masm, SSE2);
  CpuFeatureScope scope2(masm, CMOV);

  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(eax, &right_smi, Label::kNear);
         masm->isolate()->factory()->heap_number_map());
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);

  __ JumpIfSmi(edx, &left_smi, Label::kNear);
         masm->isolate()->factory()->heap_number_map());

  __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
         masm->isolate()->factory()->heap_number_map());
         masm->isolate()->factory()->heap_number_map());

  __ bind(&unordered);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
  __ JumpIfSmi(edx, &unordered);

  __ bind(&maybe_undefined2);
  __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  __ cmp(left, right);
void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);

  __ cmp(left, right);
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  __ cmp(left, right);

  __ bind(&do_compare);

  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

      masm, left, right, tmp1, tmp2);
      masm, left, right, tmp1, tmp2, tmp3);

  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);

  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  __ JumpIfSmi(ecx, &miss, Label::kNear);


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  __ JumpIfSmi(ecx, &miss, Label::kNear);
  __ cmp(ecx, known_map_);
  __ cmp(ebx, known_map_);


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
  __ CallExternalReference(miss, 3);
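
// NameDictionaryLookupStub probes a NameDictionary inline. The name's hash
// selects the first slot; collisions are resolved by re-probing with
// NameDictionary::GetProbeOffset(i). The negative lookup only needs to
// prove a name is absent, while the full Generate() loops through
// kTotalProbes slots and reports found, not-found, or probing exhaustion
// (the maybe_in_dictionary case).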
    Register properties,
  ASSERT(name->IsUniqueName());

  for (int i = 0; i < kInlinedProbes; i++) {
    Register index = r0;
        NameDictionary::GetProbeOffset(i))));
    __ lea(index, Operand(index, index, times_2, 0));
    Register entity_name = r0;

    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ cmp(entity_name, Handle<Name>(name));
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);
  }

  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));

  __ AssertName(name);

  for (int i = 0; i < kInlinedProbes; i++) {
    __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    __ lea(r0, Operand(r0, r0, times_2, 0));
    __ cmp(name, Operand(elements,
  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;
  __ SmiUntag(scratch);

  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    __ and_(scratch, Operand(esp, 0));

    __ lea(index_, Operand(scratch, scratch, times_2, 0));

    __ mov(scratch, Operand(dictionary_,
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
                     &maybe_in_dictionary);
  }

  __ bind(&maybe_in_dictionary);
  __ mov(result_, Immediate(0));
  __ ret(2 * kPointerSize);

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ ret(2 * kPointerSize);
  stub.GetCode(isolate);
  stub2.GetCode(isolate);


bool CodeStub::CanUseFPRegisters() {
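
// RecordWriteStub is the out-of-line write barrier. The two jumps emitted
// first form a patchable header: they are toggled back and forth between
// effective nops and real branches as incremental marking starts and
// stops, routing execution into GenerateIncremental for the
// non-compacting or compacting collector.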
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  __ RememberedSetHelper(object_,

  __ bind(&skip_to_incremental_noncompacting);

  __ bind(&skip_to_incremental_compacting);


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  Label dont_need_remembered_set;

  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
  __ JumpIfNotInNewSpace(regs_.scratch0(),
                         regs_.scratch0(),
                         &dont_need_remembered_set);

  __ CheckPageFlag(regs_.object(),
                   regs_.scratch0(),
                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
                   not_zero,
                   &dont_need_remembered_set);

  // First notify the incremental marker if necessary, then update the
  // remembered set.
  CheckNeedsToInformIncrementalMarker(
      masm,
      kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                         MacroAssembler::kReturnAtEnd);

  __ bind(&dont_need_remembered_set);

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}
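
// Calls out to C++ to record the write for the incremental marker,
// preserving all caller-saved registers around the call.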
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(
          masm->isolate()),
      argument_count);

  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
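
// Decides whether the incremental marker must be informed: nothing needs
// to happen if the object is no longer black, or if the value is white
// but can be marked black immediately because it needs no scanning.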
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Decrement the page's write barrier counter.
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());

  // If the object is not black we don't have to inform the marker.
  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(),
                 &object_is_black, Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  Label ensure_not_white;
  __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                   regs_.scratch1(),  // Scratch.
                   MemoryChunk::kEvacuationCandidateMask,
                   zero, &ensure_not_white, Label::kNear);
  __ CheckPageFlag(regs_.object(),
                   regs_.scratch1(),  // Scratch.
                   MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                   not_zero, &ensure_not_white, Label::kNear);
  __ jmp(&need_incremental);

  __ bind(&ensure_not_white);

  // We need an extra register, so push the object register temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object, Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_, address_, value_, save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);
  // Fall through when we need to inform the incremental marker.
}
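
// Stores eax (the value) into the array literal on the stack at index ecx
// (a smi), dispatching on the literal's elements kind; stores that need
// an elements transition go to the runtime.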
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label slow_elements_from_double;
  Label fast_elements;

  // Get the array literal index and the array literal itself.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));

  __ CheckFastElements(edi, &double_elements);

  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements.
  __ JumpIfSmi(eax, &smi_element);
  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);

  // The store requires an elements transition; call into the runtime.
  __ bind(&slow_elements);
  // ...
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&slow_elements_from_double);
  __ jmp(&slow_elements);

  // FAST_*_ELEMENTS, and the value is an object.
  __ bind(&fast_elements);
  // ...

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS, and the value is a smi.
  __ bind(&smi_element);
  // ...

  // FAST_*_DOUBLE_ELEMENTS.
  __ bind(&double_elements);
  __ StoreNumberToDoubleElements(eax, edx, ecx, edi, xmm0,
                                 &slow_elements_from_double, false);
  // ...
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  // ...
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  // ...
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}
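
// Saves the volatile registers, computes the calling function's address
// and the original stack pointer, and invokes the registered entry hook.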
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));

  // Call the entry hook.
  ASSERT(masm->isolate()->function_entry_hook() != NULL);
  __ add(esp, Immediate(2 * kPointerSize));
  // ...
}
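
// Helpers that tail-call the array constructor stub matching the elements
// kind recorded in the AllocationSite (edx holds the kind when dispatching).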
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      T stub(kind);
      __ TailCallStub(&stub);
    }
    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  }
}
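
// The one-argument dispatch also handles the packed-to-holey transition
// that becomes necessary when the single length argument is non-zero.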
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    // Is the low bit set? If so, the kind is already holey.
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  __ mov(ecx, Operand(esp, kPointerSize));
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSite);
    }

    __ bind(&normal_sequence);
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      ArraySingleArgumentConstructorStub stub(kind);
      __ TailCallStub(&stub);
    }
    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate);
    T stub1(kind, DISABLE_ALLOCATION_SITES);
    stub1.GetCode(isolate);
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few stub variants.
    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
    stubh1.GetCode(isolate);
    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
    stubh2.GetCode(isolate);
    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
    stubh3.GetCode(isolate);
  }
}
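
// Dispatches to the zero-, one-, or N-argument constructor stub; in the
// ANY case the choice is made at run time from the argc in eax.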
void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ test(eax, eax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(eax, 1);
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should have either undefined or a valid AllocationSite in ebx.
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  // If the AllocationSite is undefined, use a variant of the stub that
  // ignores allocation sites.
  __ cmp(ebx, masm->isolate()->factory()->undefined_value());
  // ...
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);

  // We might need to create a holey array; look at the first argument.
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  InternalArraySingleArgumentConstructorStub
      stub1_holey(GetHoleyElementsKind(kind));
  __ TailCallStub(&stub1_holey);

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // ... (the elements kind is loaded into ecx)
  if (FLAG_debug_code) {
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
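
// Builds the FunctionCallbackInfo frame that v8::FunctionCallback expects
// and calls the API function via CallApiFunctionAndReturn, which also
// takes care of HandleScope and context restoration.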
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  Register callee = eax;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register return_address = edi;
  Register context = esi;

  int argc = ArgumentBits::decode(bit_field_);
  bool is_store = IsStoreBits::decode(bit_field_);
  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

  typedef FunctionCallbackArguments FCA;

  Isolate* isolate = masm->isolate();

  // Pop the return address and save the context.
  __ pop(return_address);

  // Push the FCA slots: callee, call data, return values, isolate, holder.
  Register scratch = call_data;
  if (!call_data_undefined) {
    // Return value and return value default.
    __ push(Immediate(isolate->factory()->undefined_value()));
    __ push(Immediate(isolate->factory()->undefined_value()));
  }
  // ...
  __ push(Immediate(reinterpret_cast<int>(isolate)));

  __ mov(scratch, esp);

  // Push the return address back on the stack.
  __ push(return_address);

  // The API function gets a reference to the arguments. If the CPU
  // profiler is enabled, a wrapper is called instead and the callback
  // address is passed as an extra parameter, so always allocate space
  // for it.
  const int kApiArgc = 1 + 1;

  // Allocate the FunctionCallbackInfo in the caller-controlled part of
  // the stack, since it is not managed by the GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::values_.
  __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  // ...

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(isolate);

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (is_store) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              ApiParameterOperand(1),
                              argc + FCA::kArgsLength + 1,
                              return_value_operand,
                              &context_restore_operand);
}
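
// For API getters the PropertyCallbackArguments are already on the stack;
// this stub just sets up the two (plus one optional) C arguments and
// invokes the AccessorGetterCallback.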
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Space for the optional callback address parameter, in case the CPU
  // profiler is active.
  const int kApiArgc = 2 + 1;
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;

  Register api_function_address = edx;
  Register scratch = ebx;

  // Load the address of the name.
  __ lea(scratch, Operand(esp, 1 * kPointerSize));

  __ PrepareCallApiFunction(kApiArgc);
  __ add(scratch, Immediate(kPointerSize));
  // ...

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(masm->isolate());

  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              ApiParameterOperand(2),
                              kStackSpace,
                              Operand(ebp, 7 * kPointerSize),
                              NULL);
}
5371 #endif // V8_TARGET_ARCH_IA32