#if V8_TARGET_ARCH_ARM64
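// The InitializeInterfaceDescriptor() functions below record, for each stub,
// which registers carry its parameters and which deoptimization / miss
// handler to call (NULL when no handler is needed).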
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x2 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


void FastNewContextStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;


void NumberToStringStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x3, x2, x1 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x3, x2, x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x2, x3 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x2, x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0, x2 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x2, x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0, x1 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
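// Stubs that register a miss handler via SetMissHandler() fall back to the
// runtime through HydrogenCodeStub::GenerateLightweightMiss() further down,
// which pushes the register parameters and calls the recorded handler.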
static void InitializeArrayConstructorDescriptor(
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { x1, x2, x0 };
  static Register registers_no_args[] = { x1, x2 };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ =
        sizeof(registers_no_args) / sizeof(registers_no_args[0]);
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = x0;
    descriptor->register_param_count_ =
        sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
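// A constant_stack_parameter_count of -1 selects the variable-arguments path
// in InitializeArrayConstructorDescriptor() above, where the actual argument
// count is passed in x0 (stack_parameter_count_).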
static void InitializeInternalArrayConstructorDescriptor(
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { x1, x0 };
  static Register registers_no_args[] = { x1 };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ =
        sizeof(registers_no_args) / sizeof(registers_no_args[0]);
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = x0;
    descriptor->register_param_count_ =
        sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);


    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x2, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x0, x3, x1, x2 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));


void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x2, x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =


void StringAddStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  static PlatformCallInterfaceDescriptor default_descriptor =
  static PlatformCallInterfaceDescriptor noInlineDescriptor =

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { x1,
  static Representation representations[] = {
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &noInlineDescriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &noInlineDescriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { x0,
  static Representation representations[] = {
  descriptor->register_param_count_ = 5;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;
#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  int param_count = descriptor->register_param_count_;
  ASSERT((descriptor->register_param_count_ == 0) ||
         x0.Is(descriptor->register_params_[param_count - 1]));

  MacroAssembler::PushPopQueue queue(masm);
  for (int i = 0; i < param_count; ++i) {
    queue.Queue(descriptor->register_params_[i]);

  ExternalReference miss = descriptor->miss_handler();
  __ CallExternalReference(miss, descriptor->register_param_count_);
  Register input = source();
  ASSERT(result.Is64Bits());
  ASSERT(jssp.Is(masm->StackPointer()));

  int double_offset = offset();

  __ Push(scratch1, scratch2);
  __ Push(double_scratch);

  if (input.is(jssp)) double_offset += 1 * kDoubleSize;
  __ Ldr(double_scratch, MemOperand(input, double_offset));

  __ TryConvertDoubleToInt64(result, double_scratch, &done);
  __ Fmov(result, double_scratch);

  Register exponent = scratch1;
  __ CzeroX(result, ge);

  if (masm->emit_debug_code()) {
    __ Check(ge, kUnexpectedValue);

  Register mantissa = scratch2;
  __ Cneg(mantissa, mantissa, ne);
  __ Sub(exponent, exponent,
  __ Lsl(result, mantissa, exponent);

  __ Pop(double_scratch);
  __ Pop(scratch2, scratch1);
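// Double-to-integer truncation: TryConvertDoubleToInt64 covers doubles that
// already fit in an integer; otherwise the raw bits are unpacked (exponent
// and mantissa), the mantissa is negated for negative inputs, and the result
// is produced by shifting the mantissa by the adjusted exponent.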
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          FPRegister double_scratch,
  Label not_identical, return_equal, heap_number;
  Register result = x0;

  __ B(ne, &not_identical);

  if ((cond == lt) || (cond == gt)) {

  Register right_type = scratch;

  if ((cond == le) || (cond == ge)) {
    __ B(ne, &return_equal);
    __ JumpIfNotRoot(right, Heap::kUndefinedValueRootIndex, &return_equal);

  __ Bind(&return_equal);
  } else if (cond == gt) {

  if ((cond != lt) && (cond != gt)) {
    __ Bind(&heap_number);
    __ Fcmp(double_scratch, double_scratch);
    __ B(vc, &return_equal);

  if (FLAG_debug_code) {

  __ Bind(&not_identical);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
  if (masm->emit_debug_code()) {
    __ Assert(ne, kExpectedNonIdenticalObjects);

  Label right_non_object;
  __ B(lt, &right_non_object);

  ASSERT(left.is(x0) || right.is(x0));
  Label return_not_equal;
  __ Bind(&return_not_equal);

  __ Bind(&right_non_object);
  __ B(ge, &return_not_equal);

  __ Orr(scratch, left_type, right_type);
  __ TestAndBranchIfAllClear(


static void EmitSmiNonsmiComparison(MacroAssembler* masm,
  ASSERT((left.is(x0) && right.is(x1)) ||
         (right.is(x0) && left.is(x1)));
  Register result = x0;

  Label right_is_smi, done;
  __ JumpIfSmi(right, &right_is_smi);

  Label is_heap_number;
  if (!right.is(result)) {
  __ Bind(&is_heap_number);
  __ SmiUntagToDouble(left_d, left);

  __ Bind(&right_is_smi);
  Label is_heap_number;
  if (!left.is(result)) {
  __ Bind(&is_heap_number);
  __ SmiUntagToDouble(right_d, right);
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Label* possible_strings,
                                                     Label* not_both_strings) {
  ASSERT(!AreAliased(left, right, left_map, right_map, left_type, right_type));
  Register result = x0;

  __ Bind(&object_test);
  __ B(lt, not_both_strings);

  Register right_bitfield = right_type;
  Register left_bitfield = left_type;
  __ And(result, right_bitfield, left_bitfield);


static void ICCompareStub_CheckInputType(MacroAssembler* masm,
  __ JumpIfNotSmi(input, fail);
  __ JumpIfSmi(input, &ok);
  __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Register result = x0;

  ICCompareStub_CheckInputType(masm, lhs, x2, left_, &miss);
  ICCompareStub_CheckInputType(masm, rhs, x3, right_, &miss);

  Label not_smis, both_loaded_as_doubles;
  Label not_two_smis, smi_done;
  __ JumpIfEitherNotSmi(lhs, rhs, &not_two_smis);
  __ Bind(&not_two_smis);

  EmitIdenticalObjectComparison(masm, lhs, rhs, x10, d0, &slow, cond);

  __ JumpIfBothNotSmi(lhs, rhs, &not_smis);

  FPRegister rhs_d = d0;
  FPRegister lhs_d = d1;
  EmitSmiNonsmiComparison(masm, lhs, rhs, lhs_d, rhs_d, x10, &slow, strict());

  __ Bind(&both_loaded_as_doubles);
  __ Fcmp(lhs_d, rhs_d);
  __ Csinv(result, result, xzr, ge);

  if ((cond == lt) || (cond == le)) {

  Register rhs_map = x10;
  Register rhs_type = x11;
  Register lhs_map = x12;
  Register lhs_type = x13;
  EmitStrictTwoHeapObjectCompare(masm, lhs, rhs, lhs_type, rhs_type, x14);

  Label check_for_internalized_strings;
  Label flat_string_check;
  __ B(ne, &check_for_internalized_strings);
  __ Cmp(lhs_map, rhs_map);
  __ B(ne, &flat_string_check);
  __ B(&both_loaded_as_doubles);

  __ Bind(&check_for_internalized_strings);
  if ((cond == eq) && !strict()) {
    EmitCheckForInternalizedStringsOrObjects(masm, lhs, rhs, lhs_map, rhs_map,
        &flat_string_check, &slow);

  __ Bind(&flat_string_check);
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14,

  Isolate* isolate = masm->isolate();
  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, x10,
      x10, x11, x12, x13);

  if (FLAG_debug_code) {

  native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;

  if ((cond == lt) || (cond == le)) {
  saved_regs.Remove(ip0);
  saved_regs.Remove(ip1);
  saved_regs.Remove(x8);
  saved_regs.Remove(x9);
  __ PushCPURegList(saved_regs);

  AllowExternalCallThatCantCauseGC scope(masm);
  __ Mov(x0, ExternalReference::isolate_address(masm->isolate()));
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
  __ PopCPURegList(saved_regs);

  stub1.GetCode(isolate);
  stub2.GetCode(isolate);


void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
  MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
  UseScratchRegisterScope temps(masm);
  Register return_address = temps.AcquireX();
  __ Mov(return_address, lr);
  __ Mov(lr, saved_lr);
  __ PushSafepointRegistersAndDoubles();
  __ PushSafepointRegisters();
  __ Ret(return_address);


void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
  MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
  UseScratchRegisterScope temps(masm);
  Register return_address = temps.AcquireX();
  __ Mov(return_address, lr);
  __ PopSafepointRegistersAndDoubles();
  __ PopSafepointRegisters();
  __ Ret(return_address);
  Register result_tagged = x0;
  Register base_tagged = x10;
  Register exponent_tagged = x11;
  Register exponent_integer = x12;
  Register scratch1 = x14;
  Register scratch0 = x15;
  Register saved_lr = x19;
  FPRegister result_double = d0;
  FPRegister base_double = d0;
  FPRegister exponent_double = d1;
  FPRegister base_double_copy = d2;
  FPRegister scratch1_double = d6;
  FPRegister scratch0_double = d7;

  Label exponent_is_smi, exponent_is_integer;
  Label unpack_exponent;

  __ Pop(exponent_tagged, base_tagged);
  __ JumpIfSmi(base_tagged, &base_is_smi);
  __ JumpIfNotHeapNumber(base_tagged, &call_runtime);
  __ B(&unpack_exponent);

  __ Bind(&base_is_smi);
  __ SmiUntagToDouble(base_double, base_tagged);

  __ Bind(&unpack_exponent);
  __ JumpIfSmi(exponent_tagged, &exponent_is_smi);
  __ JumpIfNotHeapNumber(exponent_tagged, &call_runtime);
  __ Ldr(exponent_double,
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfSmi(exponent_tagged, &exponent_is_smi);
    __ Ldr(exponent_double,

  if (exponent_type_ != INTEGER) {
    __ TryConvertDoubleToInt64(exponent_integer, exponent_double,
                               scratch0_double, &exponent_is_integer);

    FPRegister half_double = d3;
    FPRegister minus_half_double = d4;
    __ Fmov(minus_half_double, -0.5);
    __ Fmov(half_double, 0.5);
    __ Fcmp(minus_half_double, exponent_double);
    __ Fccmp(half_double, exponent_double, NZFlag, ne);
    __ B(ne, &call_runtime);

    __ Fadd(base_double, base_double, fp_zero);

    if (masm->emit_debug_code()) {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireX();
      __ Fneg(scratch0_double, fp_zero);
      __ Fmov(temp, fp_zero);
      __ CheckRegisterIsClear(temp, kCouldNotGenerateZero);
      __ Fmov(temp, scratch0_double);
      __ CheckRegisterIsClear(temp, kCouldNotGenerateNegativeZero);
      __ Fadd(scratch0_double, scratch0_double, fp_zero);
      __ Fmov(temp, scratch0_double);
      __ CheckRegisterIsClear(temp, kExpectedPositiveZero);

    __ Fsub(scratch0_double, base_double, base_double);
    __ Fcmp(scratch0_double, 0.0);
    __ Fabs(scratch1_double, base_double);
    __ Fcsel(base_double, scratch1_double, base_double, vs);

    __ Fsqrt(result_double, base_double);
    __ Fcmp(exponent_double, 0.0);
    __ Fmov(scratch0_double, 1.0);
    __ Fdiv(result_double, scratch0_double, result_double);

    AllowExternalCallThatCantCauseGC scope(masm);
    __ Mov(saved_lr, lr);
        ExternalReference::power_double_double_function(masm->isolate()),
    __ Mov(lr, saved_lr);

  __ Bind(&exponent_is_smi);
  __ SmiUntag(exponent_integer, exponent_tagged);

  __ Bind(&exponent_is_integer);
  Register exponent_abs = x13;
  __ Cmp(exponent_integer, 0);
  __ Cneg(exponent_abs, exponent_integer, mi);
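  // Integer exponents are handled by exponentiation by squaring below:
  // scratch1_double is repeatedly squared while exponent_abs is shifted
  // right, and result_double is multiplied in whenever the current exponent
  // bit is set; negative exponents are handled afterwards by taking the
  // reciprocal (1.0 / result).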
  Label power_loop, power_loop_entry, power_loop_exit;
  __ Fmov(scratch1_double, base_double);
  __ Fmov(base_double_copy, base_double);
  __ Fmov(result_double, 1.0);
  __ B(&power_loop_entry);

  __ Bind(&power_loop);
  __ Fmul(scratch1_double, scratch1_double, scratch1_double);
  __ Lsr(exponent_abs, exponent_abs, 1);
  __ Cbz(exponent_abs, &power_loop_exit);

  __ Bind(&power_loop_entry);
  __ Tbz(exponent_abs, 0, &power_loop);
  __ Fmul(result_double, result_double, scratch1_double);

  __ Bind(&power_loop_exit);

  __ Fmov(scratch0_double, 1.0);
  __ Fdiv(result_double, scratch0_double, result_double);

  __ Fcmp(result_double, 0.0);

  __ Bind(&call_runtime);
  __ Push(base_tagged, exponent_tagged);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

  __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1);
  __ Str(result_double,
  ASSERT(result_tagged.is(x0));
  __ IncrementCounter(
      masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);

  AllowExternalCallThatCantCauseGC scope(masm);
  __ Mov(saved_lr, lr);
  __ Fmov(base_double, base_double_copy);
  __ Scvtf(exponent_double, exponent_integer);
      ExternalReference::power_double_double_function(masm->isolate()),
  __ Mov(lr, saved_lr);
  __ IncrementCounter(
      masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);

  stub1.GetCode(isolate);
  stub2.GetCode(isolate);

  stub1.GetCode(isolate);
  stub2.GetCode(isolate);


void CodeStub::GenerateFPStubs(Isolate* isolate) {


bool CEntryStub::NeedsImmovableCode() {

  stub.GetCode(isolate);
  stub_fp.GetCode(isolate);


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal,
                              Label* throw_termination,
                              bool always_allocate) {
  Isolate* isolate = masm->isolate();

  const Register& argv = x21;
  const Register& argc = x22;
  const Register& target = x23;
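  // argv, argc and the target function pointer are kept in callee-saved
  // registers (x21-x23) so that they survive the call out to the C++ builtin.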
  __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
      ExternalReference::perform_gc_function(isolate), 2, 0);

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(isolate);
  if (always_allocate) {
    __ Mov(x10, Operand(scope_depth));
    __ Add(x11, x11, 1);

  __ Mov(x2, ExternalReference::isolate_address(isolate));

  Label return_location;
  __ Adr(x12, &return_location);
  if (__ emit_debug_code()) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    __ Check(eq, kReturnAddressNotFoundInFrame);

  __ Bind(&return_location);
  const Register& result = x0;

  if (always_allocate) {
    __ Mov(x10, Operand(scope_depth));
    __ Sub(x11, x11, 1);

  __ LeaveExitFrame(save_doubles_, x10, true);
  ASSERT(jssp.Is(__ StackPointer()));
  __ SetStackPointer(csp);

  const Register& exception = result;
  const Register& exception_address = x11;
  __ Mov(exception_address,
         Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
  __ Cmp(exception, Operand(isolate->factory()->termination_exception()));
  __ B(eq, throw_termination);
  MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);

  ASSERT(jssp.Is(__ StackPointer()));
  const Register& argc_input = x0;
  const Register& target_input = x1;

  const Register& argv = x21;
  const Register& argc = x22;
  const Register& target = x23;
  Register temp_argv = x11;

  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles_, x10, 3);

  __ Mov(argc, argc_input);
  __ Mov(target, target_input);
  __ Mov(argv, temp_argv);

  Label throw_termination;

  { FrameScope scope(masm, StackFrame::MANUAL);
        ExternalReference::out_of_memory_function(masm->isolate()), 0);
  __ SetStackPointer(jssp);

  __ Bind(&throw_termination);
  __ ThrowUncatchable(x0, x10, x11, x12, x13);

  __ Bind(&throw_normal);
  __ Throw(x0, x10, x11, x12, x13);
  ASSERT(jssp.Is(__ StackPointer()));
  Register code_entry = x0;

  __ EnableInstrumentation();
  Label invoke, handler_entry, exit;

  __ SetStackPointer(csp);
  __ PushCalleeSavedRegisters();
  __ SetStackPointer(jssp);

  __ Fmov(fp_zero, 0.0);

  Isolate* isolate = masm->isolate();
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  int64_t bad_frame_pointer = -1L;
  __ Mov(x13, bad_frame_pointer);
  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
  __ Push(x13, xzr, x12, x10);

  Label non_outermost_js, done;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ Mov(x10, ExternalReference(js_entry_sp));
  __ Cbnz(x11, &non_outermost_js);
  __ Bind(&non_outermost_js);

  Assembler::BlockPoolsScope block_pools(masm);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
  __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress,

  ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
                                       : Builtins::kJSEntryTrampoline,

  Label non_outermost_js_2;
  __ B(ne, &non_outermost_js_2);
  __ Mov(x11, ExternalReference(js_entry_sp));
  __ Bind(&non_outermost_js_2);

  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));

  ASSERT(jssp.Is(__ StackPointer()));
  __ SetStackPointer(csp);
  __ PopCalleeSavedRegisters();
  if (kind() == Code::KEYED_LOAD_IC) {
    __ Cmp(key, Operand(masm->isolate()->factory()->prototype_string()));

  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, x10, x11, &miss);
  StubCompiler::TailCallBuiltin(masm,


  Register result = x0;
  Register function = right();
  Register object = left();
  Register scratch1 = x6;
  Register scratch2 = x7;
  Register res_true = x8;
  Register res_false = x9;
  Register map_check_site = x4;
  Label not_js_object, slow;

  if (!HasArgsInRegisters()) {
    __ Pop(function, object);

  if (ReturnTrueFalseObject()) {
    __ LoadTrueFalseRoots(res_true, res_false);

  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch2, &not_js_object);

  if (!HasCallSiteInlineCheck()) {
    __ JumpIfNotRoot(function, Heap::kInstanceofCacheFunctionRootIndex, &miss);
    __ JumpIfNotRoot(map, Heap::kInstanceofCacheMapRootIndex, &miss);
    __ LoadRoot(result, Heap::kInstanceofCacheAnswerRootIndex);

  Register prototype = x13;
  __ TryGetFunctionPrototype(function, prototype, scratch2, &slow,
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch1, scratch2, &slow);

  if (HasCallSiteInlineCheck()) {
    __ GetRelocatedValueLocation(map_check_site, scratch1);

  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);

  Label return_true, return_result;
  Register chain_map = x1;
  Register chain_prototype = x14;
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
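  // Walk the prototype chain of 'object': reaching 'prototype' means the
  // instanceof check succeeds (return_true), reaching null means it fails.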
  __ Mov(result, res_false);

  __ Cmp(chain_prototype, prototype);
  __ B(eq, &return_true);

  __ Cmp(chain_prototype, null_value);
  __ B(eq, &return_result);

  __ Bind(&return_true);
  __ Mov(result, res_true);

  __ Bind(&return_result);
  if (HasCallSiteInlineCheck()) {
    ASSERT(ReturnTrueFalseObject());
    __ Add(map_check_site, map_check_site, kDeltaToLoadBoolResult);
    __ GetRelocatedValueLocation(map_check_site, scratch2);

  __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);

  Label object_not_null, object_not_null_or_smi;
  __ Bind(&not_js_object);
  Register object_type = x14;

  __ JumpIfSmi(function, &slow);
  __ JumpIfNotObjectType(
  __ Mov(result, res_false);

  __ Cmp(object_type, Operand(masm->isolate()->factory()->null_value()));
  __ B(ne, &object_not_null);

  __ Bind(&object_not_null);
  __ JumpIfNotSmi(object, &object_not_null_or_smi);

  __ Bind(&object_not_null_or_smi);
  __ IsObjectJSStringType(object, scratch2, &slow);

  __ Push(object, function);

  if (ReturnTrueFalseObject()) {
    __ LoadTrueFalseRoots(res_true, res_false);
    __ Csel(result, res_true, res_false, eq);
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  Register arg_count = x0;
  static const int kDisplacement =

  __ JumpIfNotSmi(key, &slow);

  Register local_fp = x11;
  Register caller_fp = x11;
  Register caller_ctx = x12;
  __ Csel(local_fp, fp, caller_fp, ne);
  __ B(ne, &skip_adaptor);
  __ Bind(&skip_adaptor);

  __ Cmp(key, arg_count);
  __ Sub(x10, arg_count, key);

  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  Register caller_fp = x10;
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  Register arg_count_smi = x3;
  Register param_count_smi = x3;
  Register param_count = x7;
  Register recv_arg = x14;
  Register function = x4;
  __ Pop(param_count_smi, recv_arg, function);
  __ SmiUntag(param_count, param_count_smi);

  Register caller_fp = x11;
  Register caller_ctx = x12;
  Label adaptor_frame, try_allocate;
  __ B(eq, &adaptor_frame);

  Register arg_count = x2;
  __ Mov(arg_count, param_count);
  __ B(&try_allocate);

  __ Bind(&adaptor_frame);
  __ Ldr(arg_count_smi,
  __ SmiUntag(arg_count, arg_count_smi);

  Register mapped_params = x1;
  __ Cmp(param_count, arg_count);
  __ Csel(mapped_params, param_count, arg_count, lt);

  __ Bind(&try_allocate);
  const int kParameterMapHeaderSize =
  Register size = x10;
  __ Add(size, size, kParameterMapHeaderSize);
  __ Cmp(mapped_params, 0);
  __ CzeroX(size, eq);
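  // The sloppy arguments object is built together with an optional parameter
  // map for aliased (mapped) parameters and a backing FixedArray; when no
  // parameters are mapped the parameter map contribution to 'size' is zeroed
  // out above so the map is skipped entirely.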
  Register alloc_obj = x0;
  __ Allocate(size, alloc_obj, x11, x12, &runtime, TAG_OBJECT);

  Register global_object = x10;
  Register global_ctx = x10;
  Register args_offset = x11;
  Register aliased_args_offset = x10;
  __ Ldr(aliased_args_offset,
  __ Cmp(mapped_params, 0);
  __ CmovX(args_offset, aliased_args_offset, ne);

  __ CopyFields(alloc_obj, args_offset, CPURegList(x10, x12, x13),

  Register elements = x5;

  Label skip_parameter_map;
  __ Cmp(mapped_params, 0);
  Register backing_store = x6;
  __ CmovX(backing_store, elements, eq);
  __ B(eq, &skip_parameter_map);

  __ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ Add(x10, mapped_params, 2);
  __ Add(x10, x10, kParameterMapHeaderSize);

  Register loop_count = x11;
  Register index = x12;
  Register the_hole = x13;
  Label parameters_loop, parameters_test;
  __ Mov(loop_count, mapped_params);
  __ Sub(index, index, mapped_params);
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ Add(backing_store, backing_store, kParameterMapHeaderSize);
  __ B(&parameters_test);

  __ Bind(&parameters_loop);
  __ Sub(loop_count, loop_count, 1);
  __ Bind(&parameters_test);
  __ Cbnz(loop_count, &parameters_loop);

  __ Bind(&skip_parameter_map);
  __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);

  Label arguments_loop, arguments_test;
  __ Mov(x10, mapped_params);
  __ B(&arguments_test);

  __ Bind(&arguments_loop);
  __ Sub(recv_arg, recv_arg, kPointerSize);
  __ Add(x10, x10, 1);

  __ Bind(&arguments_test);
  __ Cmp(x10, arg_count);
  __ B(lt, &arguments_loop);

  __ Push(function, recv_arg, arg_count_smi);
  __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Register param_count_smi = x1;
  Register params = x2;
  Register function = x3;
  Register param_count = x13;
  __ Pop(param_count_smi, params, function);
  __ SmiUntag(param_count, param_count_smi);

  Register caller_fp = x11;
  Register caller_ctx = x12;
  Label try_allocate, runtime;
  __ B(ne, &try_allocate);

  __ Ldr(param_count_smi,
  __ SmiUntag(param_count, param_count_smi);

  Register size = x10;
  __ Bind(&try_allocate);
  __ Cmp(param_count, 0);
  __ CzeroX(size, eq);

  Register alloc_obj = x0;
  __ Allocate(size, alloc_obj, x11, x12, &runtime,

  Register global_object = x10;
  Register global_ctx = x10;
  Register args_offset = x4;
  __ CopyFields(alloc_obj, args_offset, CPURegList(x5, x6, x7),

  __ Cbz(param_count, &done);

  Register elements = x5;
  __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);

  Register array = x4;
  __ Sub(param_count, param_count, 1);
  __ Cbnz(param_count, &loop);

  __ Push(function, params, param_count_smi);
  __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  Register string_type = w0;
  Register jsstring_length = x2;
  Register jsregexp_object = x3;
  Register string_encoding = w4;
  Register sliced_string_offset = w5;
  Register string_representation = w6;

  Register subject = x19;
  Register regexp_data = x20;
  Register last_match_info_elements = x21;
  Register code_object = x22;

  CPURegList used_callee_saved_registers(subject,
                                         last_match_info_elements,
  __ PushCPURegList(used_callee_saved_registers);

  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);
  __ Mov(x10, address_of_regexp_stack_memory_size);
  __ Cbz(x10, &runtime);

  ASSERT(jssp.Is(__ StackPointer()));
  __ Peek(jsregexp_object, kJSRegExpOffset);
  __ JumpIfSmi(jsregexp_object, &runtime);
  __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);

  if (FLAG_debug_code) {
    __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);

  __ Add(x10, x10, x10);

  __ Mov(sliced_string_offset, 0);

  ASSERT(jssp.Is(__ StackPointer()));
  __ Peek(subject, kSubjectOffset);
  __ JumpIfSmi(subject, &runtime);

  Label check_underlying;
  Label not_seq_nor_cons;
  Label external_string;
  Label not_long_external;

  __ And(string_representation,
  __ Cbz(string_representation, &seq_string);
  __ B(ge, &not_seq_nor_cons);
  __ JumpIfNotRoot(x10, Heap::kempty_stringRootIndex, &runtime);

  __ Bind(&check_underlying);
  __ TestAndBranchIfAnySet(string_type.X(),

  __ Bind(&seq_string);
  ASSERT(jssp.Is(__ StackPointer()));
  __ Peek(x10, kPreviousIndexOffset);
  __ JumpIfNotSmi(x10, &runtime);
  __ Cmp(jsstring_length, x10);
  __ SmiUntag(x1, x10);

  __ Mov(x10, kPointerSize);
  __ Add(x10, regexp_data, x10);
  __ JumpIfSmi(code_object, &runtime);

  __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1,

  __ EnterExitFrame(false, x10, 1);

  __ Mov(x10, ExternalReference::isolate_address(isolate));
  __ Poke(x10, kPointerSize);

  Register length = w11;
  Register previous_index_in_bytes = w12;
  Register start = x13;
  __ Ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));

  __ Ubfx(string_encoding, string_encoding, 2, 1);
  __ Eor(string_encoding, string_encoding, 1);

  __ Lsl(previous_index_in_bytes, w1, string_encoding);
  __ Lsl(length, length, string_encoding);
  __ Lsl(sliced_string_offset, sliced_string_offset, string_encoding);
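  // After the Eor above, string_encoding is 0 for one-byte strings and 1 for
  // two-byte strings, so shifting left by it scales character counts (index,
  // length, slice offset) into byte counts.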
  __ Mov(x0, subject);

  __ Add(w10, previous_index_in_bytes, sliced_string_offset);
  __ Add(x2, start, Operand(w10, UXTW));

  __ Sub(w10, length, previous_index_in_bytes);
  __ Add(x3, x2, Operand(w10, UXTW));

  __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate));

  __ Mov(x10, address_of_regexp_stack_memory_address);
  __ Mov(x11, address_of_regexp_stack_memory_size);
  __ Add(x6, x10, x11);

  DirectCEntryStub stub;
  stub.GenerateCall(masm, code_object);

  __ LeaveExitFrame(false, x10, true);

  Label failure, exception;
  __ CompareAndBranch(w0,

  Register number_of_capture_registers = x12;
  __ Add(x10, x10, x10);
  __ Add(number_of_capture_registers, x10, 2);

  ASSERT(jssp.Is(__ StackPointer()));
  __ Peek(x10, kLastMatchInfoOffset);
  __ JumpIfSmi(x10, &runtime);

  __ Ldr(last_match_info_elements,
  __ JumpIfNotRoot(x10, Heap::kFixedArrayMapRootIndex, &runtime);

  __ SmiTag(x10, number_of_capture_registers);

  __ Mov(x10, subject);
  __ RecordWriteField(last_match_info_elements,
  __ Mov(x10, subject);
  __ RecordWriteField(last_match_info_elements,

  Register last_match_offsets = x13;
  Register offsets_vector_index = x14;
  Register current_offset = x15;

  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate);
  __ Mov(offsets_vector_index, address_of_static_offsets_vector);

  Label next_capture, done;
  __ Add(last_match_offsets,
         last_match_info_elements,
  __ Bind(&next_capture);
  __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
  __ Ldr(current_offset,
  __ SmiTag(x10, current_offset);
  __ B(&next_capture);

  __ Peek(x0, kLastMatchInfoOffset);
  __ PopCPURegList(used_callee_saved_registers);

  __ Bind(&exception);
  Register exception_value = x0;
  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
      Operand(ExternalReference(Isolate::kPendingExceptionAddress,
  __ Cmp(x10, exception_value);

  Label termination_exception;
  __ JumpIfRoot(exception_value,
                Heap::kTerminationExceptionRootIndex,
                &termination_exception);

  __ Throw(exception_value, x10, x11, x12, x13);

  __ Bind(&termination_exception);
  __ ThrowUncatchable(exception_value, x10, x11, x12, x13);

  __ Mov(x0, Operand(masm->isolate()->factory()->null_value()));
  __ PopCPURegList(used_callee_saved_registers);

  __ PopCPURegList(used_callee_saved_registers);
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);

  __ Bind(&not_seq_nor_cons);
  __ B(ne, &not_long_external);

  __ Bind(&external_string);
  if (masm->emit_debug_code()) {
    __ Check(eq, kExternalStringExpectedButNotFound);
    __ Check(ne, kExternalStringExpectedButNotFound);

  __ Bind(&not_long_external);
  __ TestAndBranchIfAnySet(string_representation,

  __ Ldr(sliced_string_offset,
  __ B(&check_underlying);
static void GenerateRecordCallTarget(MacroAssembler* masm,
                                     Register feedback_vector,
                                     Register scratch2) {
                 argc, function, feedback_vector, index));

  Label initialize, done, miss, megamorphic, not_array_function;
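  // The feedback vector slot moves through three states: the uninitialized
  // symbol, monomorphic (the target function, or an AllocationSite when the
  // target is the Array function), and the megamorphic symbol once multiple
  // targets have been seen.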
         masm->isolate()->heap()->megamorphic_symbol());
         masm->isolate()->heap()->uninitialized_symbol());

  __ Add(scratch1, feedback_vector,
  __ Cmp(scratch1, function);

  if (!FLAG_pretenuring_call_new) {
    __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
    __ Cmp(function, scratch1);
    __ B(ne, &megamorphic);

  __ JumpIfRoot(scratch1, Heap::kUninitializedSymbolRootIndex, &initialize);

  __ Bind(&megamorphic);
  __ Add(scratch1, feedback_vector,
  __ LoadRoot(scratch2, Heap::kMegamorphicSymbolRootIndex);

  __ Bind(&initialize);
  if (!FLAG_pretenuring_call_new) {
    __ Cmp(function, scratch1);
    __ B(ne, &not_array_function);

    CreateAllocationSiteStub create_stub;
    __ Push(argc, function, feedback_vector, index);
    ASSERT(feedback_vector.Is(x2) && index.Is(x3));
    __ CallStub(&create_stub);
    __ Pop(index, feedback_vector, function, argc);

  __ Bind(&not_array_function);

  __ Add(scratch1, feedback_vector,
  Register function = x1;
  Register cache_cell = x2;
  Label slow, non_function, wrap, cont;

  if (NeedsChecks()) {
    __ JumpIfSmi(function, &non_function);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5);
    __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex);

  ParameterCount actual(argc_);

  if (CallAsMethod()) {
    if (NeedsChecks()) {

    __ Peek(x3, argc_ * kPointerSize);

    if (NeedsChecks()) {
      __ JumpIfSmi(x3, &wrap);

  __ InvokeFunction(function,

  if (NeedsChecks()) {

  if (RecordCallTarget()) {
         masm->isolate()->heap()->megamorphic_symbol());
    __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);

  __ Mov(x0, argc_ + 1);
  __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);

  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);

  __ Bind(&non_function);
  __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  if (CallAsMethod()) {
    __ Poke(x0, argc_ * kPointerSize);


  Register function = x1;
  Label slow, non_function_call;

  __ JumpIfSmi(function, &non_function_call);

  Register object_type = x10;

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);

    if (FLAG_pretenuring_call_new) {

    Label feedback_register_initialized;
    __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
                  &feedback_register_initialized);
    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
    __ bind(&feedback_register_initialized);

    __ AssertUndefinedOrAllocationSite(x2, x5);

  Register jump_reg = x4;
  Register shared_func_info = jump_reg;
  Register cons_stub = jump_reg;
  Register cons_stub_code = jump_reg;
  __ Ldr(shared_func_info,
  __ Br(cons_stub_code);

  __ B(ne, &non_function_call);
  __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ Bind(&non_function_call);
  __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);

  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ JumpIfSmi(object_, receiver_not_string_);
  __ JumpIfNotSmi(index_, &index_not_smi_);

  __ Bind(&got_smi_index_);
  __ B(ls, index_out_of_range_);
  __ SmiUntag(index_);


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  __ Bind(&index_not_smi_);
              Heap::kHeapNumberMapRootIndex,
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
  call_helper.AfterCall(masm);

  __ JumpIfNotSmi(index_, index_out_of_range_);
  __ B(&got_smi_index_);

  __ Bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
  __ Mov(result_, x0);
  call_helper.AfterCall(masm);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);


  __ JumpIfNotSmi(code_, &slow_case_);
  __ B(hi, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_);


    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ Bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  __ Mov(result_, x0);
  call_helper.AfterCall(masm);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  __ JumpIfEitherNotSmi(x0, x1, &miss);

  if (GetCondition() == eq) {


void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss, handle_lhs, values_in_d_regs;
  Label untag_rhs, untag_lhs;

  Register result = x0;
  FPRegister rhs_d = d0;
  FPRegister lhs_d = d1;

  __ JumpIfNotSmi(lhs, &miss);
  __ JumpIfNotSmi(rhs, &miss);

  __ JumpIfSmi(rhs, &handle_lhs);
  __ CheckMap(rhs, x10, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,

  __ Bind(&handle_lhs);
  __ JumpIfSmi(lhs, &values_in_d_regs);
  __ CheckMap(lhs, x10, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,

  __ Bind(&values_in_d_regs);
  __ Fcmp(lhs_d, rhs_d);
  __ B(vs, &unordered);
  __ Cset(result, gt);
  __ Csinv(result, result, xzr, ge);
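  // Encode the comparison result: 0 when equal, 1 when lhs > rhs, and -1
  // otherwise (Csinv writes the bitwise inverse of xzr when 'ge' fails).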
  __ Bind(&unordered);
  __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

  __ Bind(&maybe_undefined1);
  __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss);
  __ JumpIfSmi(lhs, &unordered);

  __ Bind(&maybe_undefined2);
  __ JumpIfRoot(lhs, Heap::kUndefinedValueRootIndex, &unordered);


void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  Register result = x0;
  __ JumpIfEitherSmi(lhs, rhs, &miss);

  Register rhs_map = x10;
  Register lhs_map = x11;
  Register rhs_type = x10;
  Register lhs_type = x11;

  __ Orr(x12, lhs_type, rhs_type);
  __ TestAndBranchIfAnySet(
  __ Cset(result, ne);
void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  Register result = x0;
  Register lhs_instance_type = w2;
  Register rhs_instance_type = w3;

  __ JumpIfEitherSmi(lhs, rhs, &miss);

  __ JumpIfNotUniqueName(lhs_instance_type, &miss);
  __ JumpIfNotUniqueName(rhs_instance_type, &miss);

  __ Cset(result, ne);


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  Register result = x0;
  __ JumpIfEitherSmi(rhs, lhs, &miss);

  Register rhs_map = x10;
  Register lhs_map = x11;
  Register rhs_type = x10;
  Register lhs_type = x11;

  __ Orr(x12, lhs_type, rhs_type);
  __ B(ne, &not_equal);
  __ Bind(&not_equal);

  Label not_internalized_strings;
  __ Orr(x12, lhs_type, rhs_type);
  __ TestAndBranchIfAnySet(
  __ Bind(&not_internalized_strings);

  __ JumpIfBothInstanceTypesAreNotSequentialAscii(
      lhs_type, rhs_type, x12, x13, &runtime);

      masm, lhs, rhs, x10, x11, x12);
      masm, lhs, rhs, x10, x11, x12, x13);

  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  Register result = x0;
  __ JumpIfEitherSmi(rhs, lhs, &miss);

  __ Sub(result, rhs, lhs);


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Register result = x0;
  __ JumpIfEitherSmi(rhs, lhs, &miss);

  Register rhs_map = x10;
  Register lhs_map = x11;
  __ Cmp(rhs_map, Operand(known_map_));
  __ Cmp(lhs_map, Operand(known_map_));

  __ Sub(result, rhs, lhs);


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  Register stub_entry = x11;

  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());

  Register right = x0;
  __ Push(x1, x0, lr);
  __ Push(left, right, op);

  __ CallExternalReference(miss, 3);

  __ Jump(stub_entry);
                                          Register character) {
  __ LoadRoot(hash, Heap::kHashSeedRootIndex);

  Register hash_w = hash.W();
  __ Add(hash_w, hash_w, Operand(hash_w, LSL, 10));
  __ Eor(hash_w, hash_w, Operand(hash_w, LSR, 6));


                                          Register character) {
  __ Add(hash, hash, character);

  Register hash_w = hash.W();
  __ Add(hash_w, hash_w, Operand(hash_w, LSL, 10));
  __ Eor(hash_w, hash_w, Operand(hash_w, LSR, 6));


  Register hash_w = hash.W();
  Register scratch_w = scratch.W();

  __ Add(hash_w, hash_w, Operand(hash_w, LSL, 3));
  __ Eor(hash_w, hash_w, Operand(hash_w, LSR, 11));
  __ Add(hash_w, hash_w, Operand(hash_w, LSL, 15));

  __ Csel(hash_w, scratch_w, hash_w, eq);
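// Incremental string hash: each character is added, then hash += hash << 10
// and hash ^= hash >> 6; the last block applies the final avalanche steps
// (<< 3, >> 11, << 15), and the Csel substitutes a non-zero fallback value
// from scratch_w when the computed hash happens to be zero.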
void SubStringStub::Generate(MacroAssembler* masm) {
  Register from = x15;
  Register input_string = x10;
  Register input_length = x11;
  Register input_type = x12;
  Register result_string = x0;
  Register result_length = x1;

  __ Peek(to, kToOffset);
  __ Peek(from, kFromOffset);
  __ JumpIfEitherNotSmi(from, to, &runtime);
  __ Subs(result_length, to, from);

  __ Peek(input_string, kStringOffset);
  __ JumpIfSmi(input_string, &runtime);
  __ IsObjectJSStringType(input_string, input_type, &runtime);

  __ Cmp(result_length, 1);
  __ B(eq, &single_char);

  __ Ldrsw(input_length,
  __ Cmp(result_length, input_length);
  __ CmovX(x0, input_string, eq);
  __ B(eq, &return_x0);

  Label underlying_unpacked, sliced_string, seq_or_external_string;
  Label update_instance_type;

  __ B(eq, &seq_or_external_string);
  __ B(ne, &sliced_string);

  Register unpacked_string = input_string;
  __ JumpIfNotRoot(temp, Heap::kempty_stringRootIndex, &runtime);
  __ Ldr(unpacked_string,
  __ B(&update_instance_type);

  __ Bind(&sliced_string);
  __ Add(from, from, temp);
  __ Ldr(unpacked_string,

  __ Bind(&update_instance_type);

  __ Bind(&seq_or_external_string);

  __ Bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    __ B(lt, &copy_routine);
    Label two_byte_slice, set_slice_header;
    __ AllocateAsciiSlicedString(result_string, result_length, x3, x4,
    __ B(&set_slice_header);

    __ Bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(result_string, result_length, x3, x4,

    __ Bind(&set_slice_header);
    __ Str(unpacked_string,

    __ Bind(&copy_routine);

  Register unpacked_char0 = x13;
  Register substring_char0 = x13;
  Register result_char0 = x14;
  Label two_byte_sequential, sequential_string, allocate_result;

  __ B(eq, &sequential_string);
  __ Ldr(unpacked_char0,
  __ B(&allocate_result);

  __ Bind(&sequential_string);
  __ Add(unpacked_char0, unpacked_string,

  __ Bind(&allocate_result);
  __ AllocateAsciiString(result_string, result_length, x3, x4, x5, &runtime);

  __ Add(substring_char0, unpacked_char0, from);
  __ Add(result_char0, result_string,

  __ Bind(&two_byte_sequential);
  __ AllocateTwoByteString(result_string, result_length, x3, x4, x5, &runtime);

  __ Add(substring_char0, unpacked_char0, Operand(from, LSL, 1));
  __ Add(result_char0, result_string,
  __ Add(result_length, result_length, result_length);

  __ Bind(&return_x0);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1, x3, x4);

  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);

  __ bind(&single_char);
  StringCharAtGenerator generator(
      input_string, from, result_length, x0,
  generator.GenerateFast(masm);
  generator.SkipSlow(masm, &runtime);
                                                      Register scratch3) {
  Register result = x0;
  Register left_length = scratch1;
  Register right_length = scratch2;

  Label strings_not_equal, check_zero_length;
  __ Cmp(left_length, right_length);
  __ B(eq, &check_zero_length);

  __ Bind(&strings_not_equal);

  Label compare_chars;
  __ Bind(&check_zero_length);
  __ Cbnz(left_length, &compare_chars);

  __ Bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, left_length, scratch2,
                                scratch3, &strings_not_equal);


                                                      Register scratch4) {
  Label result_not_equal, compare_lengths;

  Register length_delta = scratch3;
  __ Subs(length_delta, scratch1, scratch2);

  Register min_length = scratch1;
  __ Csel(min_length, scratch2, scratch1, gt);
  __ Cbz(min_length, &compare_lengths);

  GenerateAsciiCharsCompareLoop(masm,
                                left, right, min_length, scratch2, scratch4,

  __ Bind(&compare_lengths);

  Register result = x0;
  __ Subs(result, length_delta, 0);

  __ Bind(&result_not_equal);
  Register less = x11;
  __ CmovX(result, greater, gt);
  __ CmovX(result, less, lt);


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Label* chars_not_equal) {
  __ SmiUntag(length);
  __ Add(left, left, scratch1);
  __ Add(right, right, scratch1);

  Register index = length;
  __ Neg(index, length);

  __ Cmp(scratch1, scratch2);
  __ B(ne, chars_not_equal);
  __ Add(index, index, 1);
  __ Cbnz(index, &loop);
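  // The compare loop biases 'index' to -length and counts up towards zero,
  // so a single Cbnz on the index both advances the loop and detects the end.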
void StringCompareStub::Generate(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();

  Register right = x10;
  Register left = x11;
  Register result = x0;
  __ Pop(right, left);

  __ Subs(result, right, left);
  __ B(ne, &not_same);
  __ IncrementCounter(counters->string_compare_native(), 1, x3, x4);

  __ JumpIfEitherIsNotSequentialAsciiStrings(left, right, x12, x13, &runtime);

  __ IncrementCounter(counters->string_compare_native(), 1, x3, x4);

  __ Push(left, right);
  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
  Register receiver = x0;

  int argc = arguments_count();

  Isolate* isolate = masm->isolate();
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);

  Label call_builtin, attempt_to_grow_elements, with_write_barrier;

  Register elements_length = x8;
  Register length = x7;
  Register elements = x6;
  Register end_elements = x5;
  Register value = x4;

  __ CheckMap(elements,
              Heap::kFixedArrayMapRootIndex,

  __ Ldr(elements_length,
  __ Cmp(length, elements_length);

  const int kEndElementsOffset =

  __ B(gt, &attempt_to_grow_elements);

  __ Peek(value, (argc - 1) * kPointerSize);
  __ JumpIfNotSmi(value, &with_write_barrier);

  __ Add(end_elements, elements,

  __ B(gt, &call_builtin);

  __ Peek(value, (argc - 1) * kPointerSize);
  __ StoreNumberToDoubleElements(value, length, elements, x10, d0, d1,

  __ Bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);

  __ Bind(&with_write_barrier);

  if (FLAG_trace_elements_transitions) {
    __ B(&call_builtin);

  __ JumpIfHeapNumber(x10, &call_builtin);

  const int origin_offset = header_size + elements_kind() * kPointerSize;
  __ B(ne, &call_builtin);
  const int target_offset = header_size + target_kind * kPointerSize;

  __ Mov(x11, receiver);

  __ Add(end_elements, elements,
  __ RecordWrite(elements,
4549 __ Bind(&attempt_to_grow_elements);
4551 if (!FLAG_inline_new) {
4552 __ B(&call_builtin);
4555 Register argument = x2;
4556 __ Peek(argument, (argc - 1) * kPointerSize);
4560 __ JumpIfNotSmi(argument, &call_builtin);
4566 ExternalReference new_space_allocation_top =
4567 ExternalReference::new_space_allocation_top_address(isolate);
4568 ExternalReference new_space_allocation_limit =
4569 ExternalReference::new_space_allocation_limit_address(isolate);
4571 const int kAllocationDelta = 4;
4572 ASSERT(kAllocationDelta >= argc);
4573 Register allocation_top_addr = x5;
4574 Register allocation_top = x9;
4576 __ Add(end_elements, elements,
4578 __ Add(end_elements, end_elements, kEndElementsOffset);
4579 __ Mov(allocation_top_addr, new_space_allocation_top);
4580 __ Ldr(allocation_top,
MemOperand(allocation_top_addr));
4581 __ Cmp(end_elements, allocation_top);
4582 __ B(
ne, &call_builtin);
4584 __ Mov(x10, new_space_allocation_limit);
4586 __ Add(allocation_top, allocation_top, kAllocationDelta * kPointerSize);
4587 __ Cmp(allocation_top, x10);
4588 __ B(
hi, &call_builtin);
4592 __ Str(allocation_top, MemOperand(allocation_top_addr));
4596 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
4597 ASSERT(kAllocationDelta == 4);
4598 __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize));
4599 __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize));
4603 __ Add(elements_length, elements_length, Smi::FromInt(kAllocationDelta));
4604 __ Str(elements_length,
4612 __ Bind(&call_builtin);
4613 __ TailCallExternalReference(
4614 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
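// BinaryOpICWithAllocationSiteStub: the undefined value loaded into x2 below
// is a dummy that is expected to be replaced with the real allocation site
// when the stub is instantiated; the debug checks verify this before the
// operation itself is handed to BinaryOpWithAllocationSiteStub.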
4618 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4624 Isolate* isolate = masm->isolate();
4629 __ LoadObject(x2, handle(isolate->heap()->undefined_value()));
4632 if (FLAG_debug_code) {
4633 __ AssertNotSmi(x2, kExpectedAllocationSite);
4635 __ AssertRegisterIsRoot(x10, Heap::kAllocationSiteMapRootIndex,
4636 kExpectedAllocationSite);
4641 BinaryOpWithAllocationSiteStub stub(state_);
4642 __ TailCallStub(&stub);
4646 bool CodeStub::CanUseFPRegisters() {
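// Incremental-marking path of the record-write stub: a remembered-set entry
// is only added when the stored value lives in new space and the object's
// page requires it; in either case the incremental marker may still need to
// be informed about the write.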
4652 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4658 Label dont_need_remembered_set;
4660 Register value = regs_.scratch0();
4662 __ JumpIfNotInNewSpace(value, &dont_need_remembered_set);
4664 __ CheckPageFlagSet(regs_.object(),
4667 &dont_need_remembered_set);
4671 CheckNeedsToInformIncrementalMarker(
4672 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
4673 InformIncrementalMarker(masm);
4674 regs_.Restore(masm);
4676 __ RememberedSetHelper(object_,
4682 __ Bind(&dont_need_remembered_set);
4685 CheckNeedsToInformIncrementalMarker(
4686 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
4687 InformIncrementalMarker(masm);
4688 regs_.Restore(masm);
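// Calls the C++ incremental marking record-write function with
// (object, slot address, isolate); caller-saved registers are preserved
// around the call.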
4693 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4694 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4696 Register address = x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address();
4697 ASSERT(!address.Is(regs_.object()));
4699 __ Mov(address, regs_.address());
4700 __ Mov(x0, regs_.object());
4701 __ Mov(x1, address);
4702 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
4704 AllowExternalCallThatCantCauseGC scope(masm);
4705 ExternalReference function =
4706 ExternalReference::incremental_marking_record_write_function(
4708 __ CallCFunction(function, 3, 0);
4710 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
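// Decides whether the incremental marker must be told about this write: a
// bailout counter forces the slow path periodically; writes into objects
// that are not black can return immediately (optionally via the remembered
// set helper), while black objects first ensure the stored value is not
// white.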
4714 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4715 MacroAssembler* masm,
4716 OnNoNeedToInformIncrementalMarker on_no_need,
4719 Label need_incremental;
4720 Label need_incremental_pop_scratch;
4722 Register mem_chunk = regs_.scratch0();
4723 Register counter = regs_.scratch1();
4727 __ Subs(counter, counter, 1);
4730 __ B(mi, &need_incremental);
4733 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
4735 regs_.Restore(masm);
4736 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4737 __ RememberedSetHelper(object_,
4748 Register value = regs_.scratch0();
4752 Label ensure_not_white;
4754 __ CheckPageFlagClear(value,
4759 __ CheckPageFlagClear(regs_.object(),
4764 __ Bind(&ensure_not_white);
4769 __ Push(regs_.address(), regs_.object());
4770 __ EnsureNotWhite(value,
4775 &need_incremental_pop_scratch);
4776 __ Pop(regs_.object(), regs_.address());
4778 regs_.Restore(masm);
4779 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4780 __ RememberedSetHelper(object_,
4789 __ Bind(&need_incremental_pop_scratch);
4790 __ Pop(regs_.object(), regs_.address());
4792 __ Bind(&need_incremental);
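// The stub begins with two instructions that are patched into branches to
// the incremental cases once incremental marking is switched on; until
// then the two 'adr xzr, ...' instructions are effectively nops and the
// stub only updates the remembered set.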
4797 void RecordWriteStub::Generate(MacroAssembler* masm) {
4798 Label skip_to_incremental_noncompacting;
4799 Label skip_to_incremental_compacting;
4807 InstructionAccurateScope scope(masm, 2);
4808 __ adr(xzr, &skip_to_incremental_noncompacting);
4809 __ adr(xzr, &skip_to_incremental_compacting);
4813 __ RememberedSetHelper(object_,
4821 __ Bind(&skip_to_incremental_noncompacting);
4824 __ Bind(&skip_to_incremental_compacting);
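// Stores a value into an array literal's backing store, dispatching on the
// elements kind: fast/object, smi-only and double elements are handled
// inline, anything else falls back to Runtime::kStoreArrayLiteralElement.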
4829 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4835 Register value = x0;
4836 Register index_smi = x3;
4838 Register array = x1;
4839 Register array_map = x2;
4840 Register array_index_smi = x4;
4841 __ PeekPair(array_index_smi, array, 0);
4844 Label double_elements, smi_element, fast_elements, slow_elements;
4845 Register bitfield2 = x10;
4855 __ B(hi, &double_elements);
4857 __ JumpIfSmi(value, &smi_element);
4865 __ Bind(&slow_elements);
4866 __ Push(array, index_smi, value);
4869 __ Push(x11, array_index_smi);
4870 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4873 __ Bind(&fast_elements);
4885 __ Bind(&smi_element);
4891 __ Bind(&double_elements);
4893 __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, d1,
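// Trampoline taken when a stub needs to call back into the runtime: the
// call goes through the code held in |ces|, after which the
// STUB_FAILURE_TRAMPOLINE frame is torn down together with the stack
// parameters it records.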
4899 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4901 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4902 int parameter_count_offset =
4908 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4917 static const unsigned int kProfileEntryHookCallSize =
4922 if (masm->isolate()->function_entry_hook() != NULL) {
4925 Label entry_hook_call_start;
4926 __ Bind(&entry_hook_call_start);
4929 ASSERT(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) ==
4930 kProfileEntryHookCallSize);
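// ProfileEntryHookStub: recovers the address of the instrumented call site
// from lr, preserves the live registers, and invokes the registered
// function entry hook either directly (when V8_HOST_ARCH_ARM64 matches the
// target) or through the C call dispatcher referenced below.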
4937 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4938 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
4948 __ Sub(x0, lr, kProfileEntryHookCallSize);
4950 #if V8_HOST_ARCH_ARM64
4951 uintptr_t entry_hook =
4952 reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook());
4953 __ Mov(x10, entry_hook);
4958 __ Mov(x10, Operand(ExternalReference(&dispatcher,
4959 ExternalReference::BUILTIN_CALL,
4962 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
4967 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize);
4971 FrameScope frame(masm, StackFrame::MANUAL);
4972 __ CallCFunction(x10, 2, 0);
4986 const Register old_stack_pointer = __ StackPointer();
4987 __ SetStackPointer(csp);
4997 __ SetStackPointer(old_stack_pointer);
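// Direct C entry call sequence: lr is pre-loaded with this stub's own code
// object before control is handed to |target| (moved into x10), so the C
// function returns through the stub, which restores the stack pointer.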
5007 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
5008 __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET));
5009 __ Mov(x10, target);
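// Positive name-dictionary lookup: a handful of probes are generated inline
// from the name's hash; if none of them hit, the live registers are spilled
// and the remaining probes run out of line, with x0 == 0 meaning the name
// was not found.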
5022 MacroAssembler* masm,
5028 Register scratch2) {
5032 __ AssertName(name);
5036 __ Sub(scratch1, scratch1, 1);
5039 for (int i = 0; i < kInlinedProbes; i++) {
5046 ASSERT(NameDictionary::GetProbeOffset(i) <
5048 __ Add(scratch2, scratch2, Operand(
5055 __ Add(scratch2, scratch2, Operand(scratch2, LSL, 1));
5058 UseScratchRegisterScope temps(masm);
5059 Register scratch3 = temps.AcquireX();
5062 __ Cmp(name, scratch3);
5070 spill_list.Combine(lr);
5071 spill_list.Remove(scratch1);
5072 spill_list.Remove(scratch2);
5074 __ PushCPURegList(spill_list);
5077 ASSERT(!elements.is(x1));
5079 __ Mov(x0, elements);
5081 __ Mov(x0, elements);
5088 __ Cbz(x0, &not_found);
5089 __ Mov(scratch2, x2);
5090 __ PopCPURegList(spill_list);
5093 __ Bind(&not_found);
5094 __ PopCPURegList(spill_list);
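// Negative lookup: proves that |name| is not a key of |properties| by
// probing with the name's precomputed hash; hitting undefined ends the
// search successfully, the hole is skipped, and any non-unique name forces
// the miss path.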
5103 Register properties,
5105 Register scratch0) {
5107 ASSERT(name->IsUniqueName());
5113 for (int i = 0; i < kInlinedProbes; i++) {
5116 Register index = scratch0;
5119 __ Sub(index, index, 1);
5120 __ And(index, index, name->Hash() + NameDictionary::GetProbeOffset(i));
5124 __ Add(index, index, Operand(index, LSL, 1));
5126 Register entity_name = scratch0;
5128 Register tmp = index;
5132 __ JumpIfRoot(entity_name, Heap::kUndefinedValueRootIndex, done);
5135 __ Cmp(entity_name, Operand(name));
5139 __ JumpIfRoot(entity_name, Heap::kTheHoleValueRootIndex, &good);
5143 __ Ldrb(entity_name,
5145 __ JumpIfNotUniqueName(entity_name, miss);
5150 spill_list.Combine(lr);
5151 spill_list.Remove(scratch0);
5153 __ PushCPURegList(spill_list);
5156 __ Mov(x1, Operand(name));
5161 __ Mov(scratch0, x0);
5162 __ PopCPURegList(spill_list);
5164 __ Cbz(scratch0, done);
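// Out-of-line portion of the dictionary lookup: runs the probes that were
// not inlined (kInlinedProbes up to kTotalProbes) and reports the outcome
// in x0 through the in_dictionary / not_in_dictionary exits.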
5180 Register result = x0;
5181 Register dictionary = x0;
5183 Register index = x2;
5186 Register undefined = x5;
5187 Register entry_key = x6;
5189 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
5192 __ Sub(mask, mask, 1);
5195 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
5197 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
5204 ASSERT(NameDictionary::GetProbeOffset(i) <
5209 __ Mov(index, hash);
5215 __ Add(index, index, Operand(index, LSL, 1));
5221 __ Cmp(entry_key, undefined);
5222 __ B(eq, &not_in_dictionary);
5225 __ Cmp(entry_key, key);
5226 __ B(eq, &in_dictionary);
5232 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
5236 __ Bind(&maybe_in_dictionary);
5245 __ Bind(&in_dictionary);
5249 __ Bind(&not_in_dictionary);
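// Array constructor dispatch helpers: tail-call the stub specialised for
// the elements kind, which is either known statically or found by comparing
// against each fast kind in turn.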
5256 static void CreateArrayDispatch(MacroAssembler* masm,
5261 __ TailCallStub(&stub);
5267 for (int i = 0; i <= last_index; ++i) {
5272 __ CompareAndBranch(kind, candidate_kind, ne, &next);
5273 T stub(candidate_kind);
5274 __ TailCallStub(&stub);
5279 __ Abort(kUnexpectedElementsKindInArrayConstructor);
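// One-argument construction may have to go holey: if the length argument is
// non-zero while the allocation site still records a packed kind, the
// transition is noted in the site and the holey stub is tail-called;
// otherwise the normal sequence dispatches on the recorded kind.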
5289 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5298 Register allocation_site = x2;
5301 Label normal_sequence;
5311 __ Tbnz(kind, 0, &normal_sequence);
5317 __ Cbz(x10, &normal_sequence);
5323 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
5325 __ TailCallStub(&stub_holey);
5327 __ Bind(&normal_sequence);
5328 ArraySingleArgumentConstructorStub stub(initial,
5330 __ TailCallStub(&stub);
5334 __ Orr(kind, kind, 1);
5336 if (FLAG_debug_code) {
5338 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex,
5340 __ Assert(eq, kExpectedAllocationSite);
5353 __ Bind(&normal_sequence);
5356 for (int i = 0; i <= last_index; ++i) {
5359 __ CompareAndBranch(kind, candidate_kind, ne, &next);
5360 ArraySingleArgumentConstructorStub stub(candidate_kind);
5361 __ TailCallStub(&stub);
5366 __ Abort(kUnexpectedElementsKindInArrayConstructor);
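// Pre-generates the array constructor stubs for every fast elements kind so
// that none of them have to be compiled lazily at runtime.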
5374 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
5377 for (int i = 0; i <= to_index; ++i) {
5380 stub.GetCode(isolate);
5383 stub1.GetCode(isolate);
5390 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
5392 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
5394 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
5402 for (int i = 0; i < 2; i++) {
5404 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
5405 stubh1.GetCode(isolate);
5406 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
5407 stubh2.GetCode(isolate);
5408 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
5409 stubh3.GetCode(isolate);
5414 void ArrayConstructorStub::GenerateDispatchToArrayStub(
5415 MacroAssembler* masm,
5418 if (argument_count_ == ANY) {
5419 Label zero_case, n_case;
5420 __ Cbz(argc, &zero_case);
5425 CreateArrayDispatchOneArgument(masm, mode);
5427 __ Bind(&zero_case);
5429 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5433 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5435 } else if (argument_count_ == NONE) {
5436 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5437 } else if (argument_count_ == ONE) {
5438 CreateArrayDispatchOneArgument(masm, mode);
5440 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
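// Main array constructor code: sanity-checks the constructor's initial map
// in debug builds, reads the elements kind from the allocation site
// feedback when present, and dispatches to the specialised stubs above.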
5456 Register constructor = x1;
5457 Register allocation_site = x2;
5459 if (FLAG_debug_code) {
5463 Label unexpected_map, map_ok;
5468 __ JumpIfSmi(x10, &unexpected_map);
5469 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
5470 __ Bind(&unexpected_map);
5471 __ Abort(kUnexpectedInitialMapForArrayFunction);
5476 __ AssertUndefinedOrAllocationSite(allocation_site, x10);
5482 __ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info);
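// Internal arrays use the same shape of dispatch, but without allocation
// site feedback: zero arguments, exactly one argument (packed or holey
// depending on the length) and the N-argument case each get their own stub.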
5495 void InternalArrayConstructorStub::GenerateCase(
5497 Label zero_case, n_case;
5500 __ Cbz(argc, &zero_case);
5501 __ CompareAndBranch(argc, 1, ne, &n_case);
5509 __ Cbz(x10, &packed_case);
5511 InternalArraySingleArgumentConstructorStub stub1_holey(GetHoleyElementsKind(kind));
5513 __ TailCallStub(&stub1_holey);
5515 __ Bind(&packed_case);
5517 InternalArraySingleArgumentConstructorStub stub1(kind);
5518 __ TailCallStub(&stub1);
5520 __ Bind(&zero_case);
5522 InternalArrayNoArgumentConstructorStub stub0(kind);
5523 __ TailCallStub(&stub0);
5527 InternalArrayNArgumentsConstructorStub stubN(kind);
5528 __ TailCallStub(&stubN);
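// InternalArrayConstructorStub entry: the elements kind is read from the
// constructor's initial map (checked in debug builds) and GenerateCase
// above emits the code for each supported kind.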
5539 Handle<Object> undefined_sentinel(
5540 masm->isolate()->heap()->undefined_value(), masm->isolate());
5542 Register constructor = x1;
5544 if (FLAG_debug_code) {
5548 Label unexpected_map, map_ok;
5553 __ JumpIfSmi(x10, &unexpected_map);
5554 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
5555 __ Bind(&unexpected_map);
5556 __ Abort(kUnexpectedInitialMapForArrayFunction);
5566 __ LoadElementsKindFromMap(kind, x10);
5568 if (FLAG_debug_code) {
5572 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray);
5575 Label fast_elements_case;
5579 __ Bind(&fast_elements_case);
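// CallApiFunctionStub: builds the FunctionCallbackArguments layout on the
// stack (context save, callee, call data, return values, isolate, holder),
// enters an exit frame with room for the callback arguments, and performs
// the call through CallApiFunctionAndReturn, restoring the context
// afterwards.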
5584 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5598 Register callee = x0;
5599 Register call_data = x4;
5600 Register holder = x2;
5601 Register api_function_address = x1;
5602 Register context = cp;
5604 int argc = ArgumentBits::decode(bit_field_);
5605 bool is_store = IsStoreBits::decode(bit_field_);
5606 bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5608 typedef FunctionCallbackArguments FCA;
5619 Isolate* isolate = masm->isolate();
5622 __ Push(context, callee, call_data);
5627 if (!call_data_undefined) {
5628 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
5630 Register isolate_reg = x5;
5631 __ Mov(isolate_reg, ExternalReference::isolate_address(isolate));
5635 __ Push(call_data, call_data, isolate_reg, holder);
5639 __ Mov(args, masm->StackPointer());
5643 const int kApiStackSpace = 4;
5647 const int kCallApiFunctionSpillSpace = 4;
5649 FrameScope frame_scope(masm, StackFrame::MANUAL);
5650 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace);
5657 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
5664 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
5667 ApiFunction thunk_fun(thunk_address);
5668 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5671 AllowExternalCallThatCantCauseGC scope(masm);
5673 MemOperand context_restore_operand(fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5675 int return_value_offset = 0;
5677 return_value_offset = 2 + FCA::kArgsLength;
5679 return_value_offset = 2 + FCA::kReturnValueOffset;
5681 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5683 const int spill_offset = 1 + kApiStackSpace;
5684 __ CallApiFunctionAndReturn(api_function_address,
5688 return_value_operand,
5689 &context_restore_operand);
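// CallApiGetterStub: x0 is set up to point at the property name on the
// stack and x1 at the PropertyCallbackInfo arguments; the accessor getter
// is then invoked through CallApiFunctionAndReturn, passing the profiling
// getter thunk so the profiler can intercept the call.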
5693 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5701 Register api_function_address = x2;
5703 __ Mov(x0, masm->StackPointer());
5704 __ Add(x1, x0, 1 * kPointerSize);
5706 const int kApiStackSpace = 1;
5710 const int kCallApiFunctionSpillSpace = 4;
5712 FrameScope frame_scope(masm, StackFrame::MANUAL);
5713 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace);
5717 __ Poke(x1, 1 * kPointerSize);
5724 ExternalReference::PROFILING_GETTER_CALL;
5725 ApiFunction thunk_fun(thunk_address);
5726 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5729 const int spill_offset = 1 + kApiStackSpace;
5730 __ CallApiFunctionAndReturn(api_function_address,
5743 #endif // V8_TARGET_ARCH_ARM64