#if V8_TARGET_ARCH_ARM

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

void FastNewContextStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

void NumberToStringStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r3, r2, r1 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r3, r2, r1, r0 };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2, r3 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2, r1, r0 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0, r2 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2, r1, r0 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0, r1 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
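// Each InitializeInterfaceDescriptor() above follows the same pattern: it
// names the registers the stub takes its parameters in, records how many
// there are, and supplies either a deoptimization handler (the entry of a
// runtime function used when the stub bails out) or NULL, plus, for the IC
// stubs, a miss handler ExternalReference. GenerateLightweightMiss() further
// down consumes exactly these fields when it pushes the parameters and calls
// the miss handler.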
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { r1, r2, r0 };
  static Register registers_no_args[] = { r1, r2 };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = r0;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =

static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  static Register registers_variable_args[] = { r1, r0 };
  static Register registers_no_args[] = { r1 };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    descriptor->stack_parameter_count_ = r0;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->deoptimization_handler_ =
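// For the two array constructor descriptor helpers above,
// constant_stack_parameter_count selects between two shapes: 0 means the
// stub takes a fixed set of register parameters only, while any other value
// (including the -1 used by the N-argument stubs below) additionally routes
// the dynamic argument count through r0 via stack_parameter_count_.
// hint_stack_parameter_count_ simply records the constant that was
// requested.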
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}

    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}

    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}

    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}

    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}

    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r2, r0 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0, r3, r1, r2 };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}

void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2, r1, r0 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =

void StringAddStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
  static PlatformCallInterfaceDescriptor default_descriptor =

  static PlatformCallInterfaceDescriptor noInlineDescriptor =

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { r1,
  static Representation representations[] = {
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &noInlineDescriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &noInlineDescriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { cp,
  static Representation representations[] = {
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;

  CallInterfaceDescriptor* descriptor =
  static Register registers[] = { r0,
  static Representation representations[] = {
  descriptor->register_param_count_ = 5;
  descriptor->register_params_ = registers;
  descriptor->param_representations_ = representations;
  descriptor->platform_specific_descriptor_ = &default_descriptor;
#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm,

static void EmitSmiNonsmiComparison(MacroAssembler* masm,

static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  int param_count = descriptor->register_param_count_;
  ASSERT(descriptor->register_param_count_ == 0 ||
         r0.is(descriptor->register_params_[param_count - 1]));
  for (int i = 0; i < param_count; ++i) {
    __ push(descriptor->register_params_[i]);
  }
  ExternalReference miss = descriptor->miss_handler();
  __ CallExternalReference(miss, descriptor->register_param_count_);
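// Note on the lightweight miss path above: the stub's register parameters
// are pushed in descriptor order (the ASSERT checks that r0, the return
// register, is the last of them), and the descriptor's miss_handler() is
// then called with that many arguments, so the runtime miss function sees
// exactly the stub's inputs.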
class ConvertToDoubleStub : public PlatformCodeStub {
  ConvertToDoubleStub(Register result_reg_1,
                      Register result_reg_2,
                      Register scratch_reg)
      : result1_(result_reg_1),
        result2_(result_reg_2),
        zeros_(scratch_reg) { }

  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
    return result1_.code() +
           (result2_.code() << 4) +
           (source_.code() << 8) +
           (zeros_.code() << 12);

  void Generate(MacroAssembler* masm);

void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
  Register exponent = result1_;
  Register mantissa = result2_;

  __ SmiUntag(source_);
  __ rsb(source_, source_, Operand::Zero(), LeaveCC, ne);
  __ cmp(source_, Operand(1));
  __ b(gt, &not_special);

  const uint32_t exponent_word_for_1 =
  __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
  __ mov(mantissa, Operand::Zero());

  __ bind(&not_special);
  __ clz(zeros_, source_);
  __ add(mantissa, mantissa, Operand(fudge));
         Operand(mantissa, LSL, HeapNumber::kExponentShift));
  __ add(zeros_, zeros_, Operand(1));
  __ mov(source_, Operand(source_, LSL, zeros_));
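// ConvertToDoubleStub (roughly): build the raw IEEE-754 bit pattern of a
// heap number from an untagged integer. The smi is untagged, negative
// inputs are negated (the conditional rsb, with the sign presumably kept in
// the exponent word), 0 and 1 are special-cased, and the general path uses
// clz to locate the leading one bit so the value can be normalized into an
// exponent word (result1_) and a mantissa word (result2_).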
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  int double_offset = offset();
  Register scratch_low =
  Register scratch_high =

  __ Push(scratch_high, scratch_low, scratch);

    __ vldr(double_scratch, MemOperand(input_reg, double_offset));
    __ vmov(scratch_low, scratch_high, double_scratch);

    __ vcvt_s32_f64(double_scratch.low(), double_scratch);
    __ vmov(result_reg, double_scratch.low());

    __ sub(scratch, result_reg, Operand(1));
    __ cmp(scratch, Operand(0x7ffffffe));

    if (double_offset == 0) {
      __ ldm(ia, input_reg, scratch_low.bit() | scratch_high.bit());
    } else {
      __ ldr(scratch_low, MemOperand(input_reg, double_offset));
    }

  __ Ubfx(scratch, scratch_high,
  __ cmp(scratch, Operand(83));
  __ b(ge, &out_of_range);

  __ rsb(scratch, scratch, Operand(51), SetCC);
  __ mov(scratch_low, Operand(scratch_low, LSR, scratch));
  __ rsb(scratch, scratch, Operand(32));
  __ Ubfx(result_reg, scratch_high,
  __ orr(result_reg, result_reg,
  __ orr(result_reg, scratch_low, Operand(result_reg, LSL, scratch));

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());

  __ rsb(scratch, scratch, Operand::Zero());
  __ mov(result_reg, Operand(scratch_low, LSL, scratch));

  __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31));
  __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31));

  __ Pop(scratch_high, scratch_low, scratch);

  stub1.GetCode(isolate);
  stub2.GetCode(isolate);
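// DoubleToIStub (truncating double -> int32): the fast path uses
// vcvt_s32_f64 and keeps the result unless it saturated (the
// sub/cmp-with-0x7ffffffe check). The slow path reloads the double as two
// words, extracts the exponent with Ubfx, jumps to out_of_range (which just
// returns zero) when the exponent is large enough that the low 32 bits of
// the truncated value are all zero, shifts the mantissa words into place,
// and finally applies the sign using the ASR/LSR-by-31 trick on the high
// word (negate via eor/add when the sign bit is set).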
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  __ cmp(the_int_, Operand(0x80000000u));
  __ b(eq, &max_negative_int);
  uint32_t non_smi_exponent =
  __ mov(scratch_, Operand(non_smi_exponent));
  __ rsb(the_int_, the_int_, Operand::Zero(), LeaveCC, cs);

  ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
  __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));
  __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));

  __ bind(&max_negative_int);
  __ mov(ip, Operand::Zero());
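// WriteInt32ToHeapNumberStub writes the double representation of an int32
// into a heap number's fields: the most negative int (0x80000000) gets its
// own path since it cannot be negated, otherwise negative inputs are
// negated (the conditional rsb), a precomputed non_smi_exponent word is
// combined in, and the magnitude is split across the high and low words via
// the LSR/LSL shifts by shift_distance and 32 - shift_distance.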
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
  Label heap_number, return_equal;
  __ b(ne, &not_identical);

  if (cond == lt || cond == gt) {
  __ b(eq, &heap_number);
  if (cond == le || cond == ge) {
  __ b(ne, &return_equal);
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ b(ne, &return_equal);

  __ bind(&return_equal);
  } else if (cond == gt) {

  if (cond != lt && cond != gt) {
  __ bind(&heap_number);
  __ cmp(r3, Operand(-1));
  __ b(ne, &return_equal);

  __ bind(&not_identical);

static void EmitSmiNonsmiComparison(MacroAssembler* masm,
         (lhs.is(r1) && rhs.is(r0)));
  __ JumpIfSmi(rhs, &rhs_is_smi);
  __ SmiToDouble(d7, lhs);

  __ bind(&rhs_is_smi);
  __ SmiToDouble(d6, rhs);

static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
         (lhs.is(r1) && rhs.is(r0)));
  Label first_non_object;
  __ b(lt, &first_non_object);
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ bind(&first_non_object);
  __ b(eq, &return_not_equal);
  __ b(ge, &return_not_equal);
  __ b(eq, &return_not_equal);
  __ b(eq, &return_not_equal);
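// The three helpers above cover the non-generic parts of comparison:
// EmitIdenticalObjectComparison handles the "both sides are the same
// object" case, including the NaN check on heap numbers (the cmp of r3
// against -1, i.e. an all-ones exponent); EmitSmiNonsmiComparison converts
// the smi operand to a double (lhs into d7, rhs into d6) so the pair can be
// compared with VFP; and EmitStrictTwoHeapObjectCompare branches to
// return_not_equal as soon as the operand types show that two distinct heap
// objects cannot compare equal under strict equality.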
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
         (lhs.is(r1) && rhs.is(r0)));
  __ b(ne, not_heap_numbers);
  __ jmp(both_loaded_as_doubles);

static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Label* possible_strings,
                                                     Label* not_both_strings) {
         (lhs.is(r1) && rhs.is(r0)));
  __ b(ne, &object_test);
  __ b(ne, possible_strings);
  __ b(ge, not_both_strings);
  __ b(ne, possible_strings);

  __ bind(&object_test);
  __ b(lt, not_both_strings);
  __ b(lt, not_both_strings);
static void ICCompareStub_CheckInputType(MacroAssembler* masm,
    __ JumpIfNotSmi(input, fail);
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,

void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  ICCompareStub_CheckInputType(masm, lhs, r2, left_, &miss);
  ICCompareStub_CheckInputType(masm, rhs, r3, right_, &miss);

  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  Label not_two_smis, smi_done;
  __ JumpIfNotSmi(r2, &not_two_smis);
  __ bind(&not_two_smis);

  EmitIdenticalObjectComparison(masm, &slow, cc);

  __ and_(r2, lhs, Operand(rhs));
  __ JumpIfNotSmi(r2, &not_smis);

  EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  Isolate* isolate = masm->isolate();
  __ bind(&lhs_not_nan);
  __ VFPCompareAndSetFlags(d7, d6);

  if (cc == lt || cc == le) {

  EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);

  Label check_for_internalized_strings;
  Label flat_string_check;

  EmitCheckForTwoHeapNumbers(masm,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);

  if (cc == eq && !strict()) {
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, r2, r3, &slow);

  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);

  native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  if (cc == lt || cc == le) {
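// GenerateGeneric dispatches from cheapest to most general case, as the
// labels above show: two smis first, then identical operands, then "both
// loaded as doubles" via EmitSmiNonsmiComparison and a VFP compare of
// d7/d6, then strict comparison of two heap objects, two heap numbers,
// internalized strings (only for non-strict equality), flat ASCII strings,
// and finally a tail call to a JavaScript builtin (EQUALS or STRICT_EQUALS
// for equality) as the fully generic fallback.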
  const Register scratch = r1;
  __ SaveFPRegs(sp, scratch);
  const int argument_count = 1;
  const int fp_argument_count = 0;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ mov(r0, Operand(ExternalReference::isolate_address(masm->isolate())));
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
  __ RestoreFPRegs(sp, scratch);
1266 const Register base =
r1;
1267 const Register exponent =
r2;
1268 const Register heapnumbermap =
r5;
1269 const Register heapnumber =
r0;
1270 const DwVfpRegister double_base =
d0;
1271 const DwVfpRegister double_exponent =
d1;
1272 const DwVfpRegister double_result =
d2;
1273 const DwVfpRegister double_scratch =
d3;
1274 const SwVfpRegister single_scratch =
s6;
1275 const Register scratch =
r9;
1276 const Register scratch2 =
r4;
1278 Label call_runtime, done, int_exponent;
1280 Label base_is_smi, unpack_exponent;
1287 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
1289 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
1291 __ cmp(scratch, heapnumbermap);
1292 __ b(
ne, &call_runtime);
1295 __ jmp(&unpack_exponent);
1297 __ bind(&base_is_smi);
1298 __ vmov(single_scratch, scratch);
1299 __ vcvt_f64_s32(double_base, single_scratch);
1300 __ bind(&unpack_exponent);
1302 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
1305 __ cmp(scratch, heapnumbermap);
1306 __ b(
ne, &call_runtime);
1307 __ vldr(double_exponent,
1309 }
else if (exponent_type_ ==
TAGGED) {
1311 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
1313 __ vldr(double_exponent,
1317 if (exponent_type_ !=
INTEGER) {
1318 Label int_exponent_convert;
1320 __ vcvt_u32_f64(single_scratch, double_exponent);
1323 __ vcvt_f64_u32(double_scratch, single_scratch);
1324 __ VFPCompareAndSetFlags(double_scratch, double_exponent);
1325 __ b(
eq, &int_exponent_convert);
1331 Label not_plus_half;
1334 __ vmov(double_scratch, 0.5, scratch);
1335 __ VFPCompareAndSetFlags(double_exponent, double_scratch);
1336 __ b(
ne, ¬_plus_half);
1341 __ VFPCompareAndSetFlags(double_base, double_scratch);
1342 __ vneg(double_result, double_scratch,
eq);
1347 __ vsqrt(double_result, double_scratch);
1350 __ bind(¬_plus_half);
1351 __ vmov(double_scratch, -0.5, scratch);
1352 __ VFPCompareAndSetFlags(double_exponent, double_scratch);
1353 __ b(
ne, &call_runtime);
1358 __ VFPCompareAndSetFlags(double_base, double_scratch);
1364 __ vmov(double_result, 1.0, scratch);
1365 __ vsqrt(double_scratch, double_scratch);
1366 __ vdiv(double_result, double_result, double_scratch);
1372 AllowExternalCallThatCantCauseGC scope(masm);
1373 __ PrepareCallCFunction(0, 2, scratch);
1374 __ MovToFloatParameters(double_base, double_exponent);
1376 ExternalReference::power_double_double_function(masm->isolate()),
1380 __ MovFromFloatResult(double_result);
1383 __ bind(&int_exponent_convert);
1384 __ vcvt_u32_f64(single_scratch, double_exponent);
1385 __ vmov(scratch, single_scratch);
1389 __ bind(&int_exponent);
1392 if (exponent_type_ ==
INTEGER) {
1393 __ mov(scratch, exponent);
1396 __ mov(exponent, scratch);
1398 __ vmov(double_scratch, double_base);
1399 __ vmov(double_result, 1.0, scratch2);
1402 __ cmp(scratch, Operand::Zero());
1407 __ bind(&while_true);
1408 __ mov(scratch, Operand(scratch,
ASR, 1),
SetCC);
1409 __ vmul(double_result, double_result, double_scratch,
cs);
1410 __ vmul(double_scratch, double_scratch, double_scratch,
ne);
1411 __ b(
ne, &while_true);
1413 __ cmp(exponent, Operand::Zero());
1415 __ vmov(double_scratch, 1.0, scratch);
1416 __ vdiv(double_result, double_scratch, double_result);
1419 __ VFPCompareAndSetFlags(double_result, 0.0);
1423 __ vmov(single_scratch, exponent);
1424 __ vcvt_f64_s32(double_exponent, single_scratch);
1427 Counters* counters = masm->isolate()->counters();
1430 __ bind(&call_runtime);
1431 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
1436 __ AllocateHeapNumber(
1437 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
1438 __ vstr(double_result,
1441 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
1446 AllowExternalCallThatCantCauseGC scope(masm);
1447 __ PrepareCallCFunction(0, 2, scratch);
1448 __ MovToFloatParameters(double_base, double_exponent);
1450 ExternalReference::power_double_double_function(masm->isolate()),
1454 __ MovFromFloatResult(double_result);
1457 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
bool CEntryStub::NeedsImmovableCode() {

void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);

void CodeStub::GenerateFPStubs(Isolate* isolate) {
  CEntryStub save_doubles(1, mode);
  StoreBufferOverflowStub stub(mode);
  Code* save_doubles_code;
  if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
    save_doubles_code = *save_doubles.GetCode(isolate);
  }
  Code* store_buffer_overflow_code;
  if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) {
    store_buffer_overflow_code = *stub.GetCode(isolate);
  }
  isolate->set_fp_stubs_generated(true);

  stub.GetCode(isolate);
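// The FP variants of the CEntry and StoreBufferOverflow stubs are created
// eagerly: GenerateFPStubs compiles the save-doubles CEntryStub and a
// StoreBufferOverflowStub once, caches the resulting Code objects in the
// stub cache, and then marks the isolate with set_fp_stubs_generated(true)
// so later code can rely on them without compiling stubs lazily.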
1505 void CEntryStub::GenerateCore(MacroAssembler* masm,
1506 Label* throw_normal_exception,
1507 Label* throw_termination_exception,
1509 bool always_allocate) {
1514 Isolate* isolate = masm->isolate();
1518 __ PrepareCallCFunction(2, 0,
r1);
1519 __ mov(
r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1520 __ CallCFunction(ExternalReference::perform_gc_function(isolate),
1524 ExternalReference scope_depth =
1525 ExternalReference::heap_always_allocate_scope_depth(isolate);
1526 if (always_allocate) {
1527 __ mov(
r0, Operand(scope_depth));
1529 __ add(
r1,
r1, Operand(1));
1538 #if V8_HOST_ARCH_ARM
1540 int frame_alignment_mask = frame_alignment - 1;
1541 if (FLAG_debug_code) {
1543 Label alignment_as_expected;
1545 __ tst(
sp, Operand(frame_alignment_mask));
1546 __ b(
eq, &alignment_as_expected);
1548 __ stop(
"Unexpected alignment");
1549 __ bind(&alignment_as_expected);
1554 __ mov(
r2, Operand(ExternalReference::isolate_address(isolate)));
1566 __ add(
lr,
pc, Operand(4));
1571 __ VFPEnsureFPSCRState(
r2);
1573 if (always_allocate) {
1576 __ mov(
r2, Operand(scope_depth));
1578 __ sub(
r3,
r3, Operand(1));
1583 Label failure_returned;
1586 __ add(
r2,
r0, Operand(1));
1588 __ b(
eq, &failure_returned);
1595 __ LeaveExitFrame(save_doubles_,
r4,
true);
1600 __ bind(&failure_returned);
1606 __ mov(
ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1611 __ LoadRoot(
r3, Heap::kTheHoleValueRootIndex);
1612 __ mov(
ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1618 __ LoadRoot(
r3, Heap::kTerminationExceptionRootIndex);
1620 __ b(
eq, throw_termination_exception);
1623 __ jmp(throw_normal_exception);
1651 FrameAndConstantPoolScope scope(masm, StackFrame::MANUAL);
1652 __ EnterExitFrame(save_doubles_);
1662 Label throw_normal_exception;
1663 Label throw_termination_exception;
1667 &throw_normal_exception,
1668 &throw_termination_exception,
1674 &throw_normal_exception,
1675 &throw_termination_exception,
1681 __ mov(
r0, Operand(reinterpret_cast<int32_t>(failure)));
1683 &throw_normal_exception,
1684 &throw_termination_exception,
1688 { FrameScope scope(masm, StackFrame::MANUAL);
1689 __ PrepareCallCFunction(0,
r0);
1691 ExternalReference::out_of_memory_function(masm->isolate()), 0, 0);
1694 __ bind(&throw_termination_exception);
1695 __ ThrowUncatchable(
r0);
1697 __ bind(&throw_normal_exception);
1709 Label invoke, handler_entry, exit;
1722 __ VFPEnsureFPSCRState(
r4);
1741 Isolate* isolate = masm->isolate();
1742 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1743 if (FLAG_enable_ool_constant_pool) {
1744 __ mov(
r8, Operand(isolate->factory()->empty_constant_pool_array()));
1749 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
1751 __ mov(
ip, Operand(-1));
1753 (FLAG_enable_ool_constant_pool ?
r8.
bit() : 0) |
1760 Label non_outermost_js;
1761 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
1762 __ mov(
r5, Operand(ExternalReference(js_entry_sp)));
1764 __ cmp(
r6, Operand::Zero());
1765 __ b(
ne, &non_outermost_js);
1770 __ bind(&non_outermost_js);
1784 __ bind(&handler_entry);
1785 handler_offset_ = handler_entry.pos();
1790 __ mov(
ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1801 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
1808 __ mov(
r5, Operand(isolate->factory()->the_hole_value()));
1809 __ mov(
ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1824 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1826 __ mov(
ip, Operand(construct_entry));
1828 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
1829 __ mov(
ip, Operand(entry));
1842 Label non_outermost_js_2;
1845 __ b(
ne, &non_outermost_js_2);
1846 __ mov(
r6, Operand::Zero());
1847 __ mov(
r5, Operand(ExternalReference(js_entry_sp)));
1849 __ bind(&non_outermost_js_2);
1854 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
1862 if (FLAG_debug_code) {
1884 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
1886 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
1889 const Register
object =
r0;
1891 const Register
function =
r1;
1892 const Register prototype =
r4;
1893 const Register inline_site =
r9;
1894 const Register scratch =
r2;
1898 Label slow, loop, is_instance, is_not_instance, not_js_object;
1900 if (!HasArgsInRegisters()) {
1906 __ JumpIfSmi(
object, ¬_js_object);
1907 __ IsObjectJSObjectType(
object, map, scratch, ¬_js_object);
1911 if (!HasCallSiteInlineCheck()) {
1913 __ CompareRoot(
function, Heap::kInstanceofCacheFunctionRootIndex);
1915 __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
1917 __ LoadRoot(
r0, Heap::kInstanceofCacheAnswerRootIndex);
1918 __ Ret(HasArgsInRegisters() ? 0 : 2);
1924 __ TryGetFunctionPrototype(
function, prototype, scratch, &slow,
true);
1927 __ JumpIfSmi(prototype, &slow);
1928 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
1932 if (!HasCallSiteInlineCheck()) {
1933 __ StoreRoot(
function, Heap::kInstanceofCacheFunctionRootIndex);
1934 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
1936 ASSERT(HasArgsInRegisters());
1941 const Register offset =
r5;
1942 __ sub(inline_site,
lr, offset);
1944 __ GetRelocatedValueLocation(inline_site, offset);
1954 Register scratch2 =
map;
1958 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
1960 __ cmp(scratch, Operand(prototype));
1961 __ b(
eq, &is_instance);
1962 __ cmp(scratch, scratch2);
1963 __ b(
eq, &is_not_instance);
1968 __ bind(&is_instance);
1969 if (!HasCallSiteInlineCheck()) {
1971 __ StoreRoot(
r0, Heap::kInstanceofCacheAnswerRootIndex);
1974 __ LoadRoot(
r0, Heap::kTrueValueRootIndex);
1975 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
1977 __ GetRelocatedValueLocation(inline_site, scratch);
1980 if (!ReturnTrueFalseObject()) {
1984 __ Ret(HasArgsInRegisters() ? 0 : 2);
1986 __ bind(&is_not_instance);
1987 if (!HasCallSiteInlineCheck()) {
1989 __ StoreRoot(
r0, Heap::kInstanceofCacheAnswerRootIndex);
1992 __ LoadRoot(
r0, Heap::kFalseValueRootIndex);
1993 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
1995 __ GetRelocatedValueLocation(inline_site, scratch);
1998 if (!ReturnTrueFalseObject()) {
2002 __ Ret(HasArgsInRegisters() ? 0 : 2);
2004 Label object_not_null, object_not_null_or_smi;
2005 __ bind(¬_js_object);
2008 __ JumpIfSmi(
function, &slow);
2013 __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
2014 __ b(
ne, &object_not_null);
2016 __ Ret(HasArgsInRegisters() ? 0 : 2);
2018 __ bind(&object_not_null);
2020 __ JumpIfNotSmi(
object, &object_not_null_or_smi);
2022 __ Ret(HasArgsInRegisters() ? 0 : 2);
2024 __ bind(&object_not_null_or_smi);
2026 __ IsObjectJSStringType(
object, scratch, &slow);
2028 __ Ret(HasArgsInRegisters() ? 0 : 2);
2032 if (!ReturnTrueFalseObject()) {
2033 if (HasArgsInRegisters()) {
2043 __ cmp(
r0, Operand::Zero());
2044 __ LoadRoot(
r0, Heap::kTrueValueRootIndex,
eq);
2045 __ LoadRoot(
r0, Heap::kFalseValueRootIndex,
ne);
2046 __ Ret(HasArgsInRegisters() ? 0 : 2);
2054 if (
kind() == Code::KEYED_LOAD_IC) {
2060 __ cmp(
r0, Operand(masm->isolate()->factory()->prototype_string()));
2074 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver,
r3,
r4, &miss);
2076 StubCompiler::TailCallBuiltin(
2087 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2090 const int kDisplacement =
2095 __ JumpIfNotSmi(
r1, &slow);
2112 __ add(
r3,
fp, Operand::PointerOffsetFromSmiKey(
r3));
2126 __ add(
r3,
r2, Operand::PointerOffsetFromSmiKey(
r3));
2134 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2138 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
2158 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
2162 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
2176 Label adaptor_frame, try_allocate;
2180 __ b(
eq, &adaptor_frame);
2184 __ b(&try_allocate);
2187 __ bind(&adaptor_frame);
2199 __ bind(&try_allocate);
2203 const int kParameterMapHeaderSize =
2224 const int kNormalOffset =
2226 const int kAliasedOffset =
2231 __ cmp(
r1, Operand::Zero());
2248 const int kCalleeOffset = JSObject::kHeaderSize +
2254 const int kLengthOffset = JSObject::kHeaderSize +
2269 Label skip_parameter_map;
2274 __ b(
eq, &skip_parameter_map);
2276 __ LoadRoot(
r6, Heap::kSloppyArgumentsElementsMapRootIndex);
2282 __ add(
r6,
r6, Operand(kParameterMapHeaderSize));
2293 Label parameters_loop, parameters_test;
2298 __ LoadRoot(
r5, Heap::kTheHoleValueRootIndex);
2300 __ add(
r3,
r3, Operand(kParameterMapHeaderSize));
2309 __ jmp(¶meters_test);
2311 __ bind(¶meters_loop);
2319 __ bind(¶meters_test);
2321 __ b(
ne, ¶meters_loop);
2326 __ bind(&skip_parameter_map);
2332 __ LoadRoot(
r5, Heap::kFixedArrayMapRootIndex);
2336 Label arguments_loop, arguments_test;
2340 __ jmp(&arguments_test);
2342 __ bind(&arguments_loop);
2343 __ sub(
r4,
r4, Operand(kPointerSize));
2349 __ bind(&arguments_test);
2351 __ b(
lt, &arguments_loop);
2354 __ add(
sp,
sp, Operand(3 * kPointerSize));
2362 __ TailCallRuntime(Runtime::kHiddenNewArgumentsFast, 3, 1);
2366 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
2371 Label adaptor_frame, try_allocate, runtime;
2375 __ b(
eq, &adaptor_frame);
2379 __ b(&try_allocate);
2382 __ bind(&adaptor_frame);
2385 __ add(
r3,
r2, Operand::PointerOffsetFromSmiKey(
r1));
2391 Label add_arguments_object;
2392 __ bind(&try_allocate);
2394 __ b(
eq, &add_arguments_object);
2396 __ bind(&add_arguments_object);
2410 __ CopyFields(
r0,
r4,
d0, JSObject::kHeaderSize / kPointerSize);
2420 __ cmp(
r1, Operand::Zero());
2430 __ LoadRoot(
r3, Heap::kFixedArrayMapRootIndex);
2445 __ sub(
r1,
r1, Operand(1));
2446 __ cmp(
r1, Operand::Zero());
2451 __ add(
sp,
sp, Operand(3 * kPointerSize));
2456 __ TailCallRuntime(Runtime::kHiddenNewStrictArgumentsFast, 3, 1);
2460 void RegExpExecStub::Generate(MacroAssembler* masm) {
2464 #ifdef V8_INTERPRETED_REGEXP
2465 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
2466 #else // V8_INTERPRETED_REGEXP
2485 Register subject =
r4;
2486 Register regexp_data =
r5;
2487 Register last_match_info_elements =
no_reg;
2490 Isolate* isolate = masm->isolate();
2491 ExternalReference address_of_regexp_stack_memory_address =
2492 ExternalReference::address_of_regexp_stack_memory_address(isolate);
2493 ExternalReference address_of_regexp_stack_memory_size =
2494 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2495 __ mov(
r0, Operand(address_of_regexp_stack_memory_size));
2497 __ cmp(
r0, Operand::Zero());
2502 __ JumpIfSmi(
r0, &runtime);
2508 if (FLAG_debug_code) {
2509 __ SmiTst(regexp_data);
2510 __ Check(
ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2512 __ Check(
eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2535 __ mov(
r9, Operand::Zero());
2537 __ JumpIfSmi(subject, &runtime);
2538 __ mov(
r3, subject);
2562 Label seq_string , external_string ,
2563 check_underlying , not_seq_nor_cons ,
2574 __ b(
eq, &seq_string);
2582 __ b(
ge, ¬_seq_nor_cons);
2587 __ CompareRoot(
r0, Heap::kempty_stringRootIndex);
2592 __ bind(&check_underlying);
2600 __ b(
ne, &external_string);
2603 __ bind(&seq_string);
2610 __ JumpIfNotSmi(
r1, &runtime);
2628 __ JumpIfSmi(
r6, &runtime);
2636 __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1,
r0,
r2);
2639 const int kRegExpExecuteArguments = 9;
2640 const int kParameterRegisters = 4;
2641 __ EnterExitFrame(
false, kRegExpExecuteArguments - kParameterRegisters);
2647 __ mov(
r0, Operand(ExternalReference::isolate_address(isolate)));
2651 __ mov(
r0, Operand(1));
2655 __ mov(
r0, Operand(address_of_regexp_stack_memory_address));
2657 __ mov(
r2, Operand(address_of_regexp_stack_memory_size));
2664 __ mov(
r0, Operand::Zero());
2669 Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
2675 __ eor(
r3,
r3, Operand(1));
2680 __ ldr(subject,
MemOperand(
fp, kSubjectOffset + 2 * kPointerSize));
2696 __ mov(
r0, subject);
2700 DirectCEntryStub stub;
2701 stub.GenerateCall(masm,
r6);
2703 __ LeaveExitFrame(
false,
no_reg,
true);
2705 last_match_info_elements =
r6;
2713 __ cmp(
r0, Operand(1));
2727 __ mov(
r1, Operand(isolate->factory()->the_hole_value()));
2728 __ mov(
r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
2737 __ CompareRoot(
r0, Heap::kTerminationExceptionRootIndex);
2739 Label termination_exception;
2740 __ b(
eq, &termination_exception);
2744 __ bind(&termination_exception);
2745 __ ThrowUncatchable(
r0);
2749 __ mov(
r0, Operand(masm->isolate()->factory()->null_value()));
2750 __ add(
sp,
sp, Operand(4 * kPointerSize));
2761 __ add(
r1,
r1, Operand(2));
2764 __ JumpIfSmi(
r0, &runtime);
2768 __ ldr(last_match_info_elements,
2771 __ CompareRoot(
r0, Heap::kFixedArrayMapRootIndex);
2778 __ cmp(
r2, Operand::SmiUntag(
r0));
2791 __ mov(
r2, subject);
2792 __ RecordWriteField(last_match_info_elements,
2798 __ mov(subject,
r2);
2802 __ RecordWriteField(last_match_info_elements,
2810 ExternalReference address_of_static_offsets_vector =
2811 ExternalReference::address_of_static_offsets_vector(isolate);
2812 __ mov(
r2, Operand(address_of_static_offsets_vector));
2816 Label next_capture, done;
2820 last_match_info_elements,
2822 __ bind(&next_capture);
2830 __ jmp(&next_capture);
2835 __ add(
sp,
sp, Operand(4 * kPointerSize));
2840 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
2844 __ bind(¬_seq_nor_cons);
2846 __ b(
gt, ¬_long_external);
2849 __ bind(&external_string);
2852 if (FLAG_debug_code) {
2856 __ Assert(
eq, kExternalStringExpectedButNotFound);
2865 __ jmp(&seq_string);
2868 __ bind(¬_long_external);
2878 __ jmp(&check_underlying);
2879 #endif // V8_INTERPRETED_REGEXP
2883 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2891 Label initialize, done, miss, megamorphic, not_array_function;
2894 masm->isolate()->heap()->megamorphic_symbol());
2896 masm->isolate()->heap()->uninitialized_symbol());
2899 __ add(
r4,
r2, Operand::PointerOffsetFromSmiKey(
r3));
2907 if (!FLAG_pretenuring_call_new) {
2913 __ CompareRoot(
r5, Heap::kAllocationSiteMapRootIndex);
2919 __ b(
ne, &megamorphic);
2927 __ CompareRoot(
r4, Heap::kUninitializedSymbolRootIndex);
2928 __ b(
eq, &initialize);
2931 __ bind(&megamorphic);
2932 __ add(
r4,
r2, Operand::PointerOffsetFromSmiKey(
r3));
2933 __ LoadRoot(
ip, Heap::kMegamorphicSymbolRootIndex);
2938 __ bind(&initialize);
2940 if (!FLAG_pretenuring_call_new) {
2944 __ b(
ne, ¬_array_function);
2956 CreateAllocationSiteStub create_stub;
2957 __ CallStub(&create_stub);
2964 __ bind(¬_array_function);
2967 __ add(
r4,
r2, Operand::PointerOffsetFromSmiKey(
r3));
2985 Label slow, non_function, wrap, cont;
2987 if (NeedsChecks()) {
2990 __ JumpIfSmi(
r1, &non_function);
2996 if (RecordCallTarget()) {
2997 GenerateRecordCallTarget(masm);
3001 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
3007 ParameterCount actual(argc_);
3009 if (CallAsMethod()) {
3010 if (NeedsChecks()) {
3026 if (NeedsChecks()) {
3027 __ JumpIfSmi(
r3, &wrap);
3038 if (NeedsChecks()) {
3041 if (RecordCallTarget()) {
3046 masm->isolate()->heap()->megamorphic_symbol());
3047 __ add(
r5,
r2, Operand::PointerOffsetFromSmiKey(
r3));
3048 __ LoadRoot(
ip, Heap::kMegamorphicSymbolRootIndex);
3053 __ b(
ne, &non_function);
3055 __ mov(
r0, Operand(argc_ + 1, RelocInfo::NONE32));
3056 __ mov(
r2, Operand::Zero());
3057 __ GetBuiltinFunction(
r1, Builtins::CALL_FUNCTION_PROXY);
3059 Handle<Code> adaptor =
3060 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3061 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3066 __ bind(&non_function);
3068 __ mov(
r0, Operand(argc_));
3069 __ mov(
r2, Operand::Zero());
3070 __ GetBuiltinFunction(
r1, Builtins::CALL_NON_FUNCTION);
3071 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3072 RelocInfo::CODE_TARGET);
3075 if (CallAsMethod()) {
3095 Label slow, non_function_call;
3098 __ JumpIfSmi(
r1, &non_function_call);
3103 if (RecordCallTarget()) {
3104 GenerateRecordCallTarget(masm);
3106 __ add(
r5,
r2, Operand::PointerOffsetFromSmiKey(
r3));
3107 if (FLAG_pretenuring_call_new) {
3113 Label feedback_register_initialized;
3117 __ CompareRoot(
r5, Heap::kAllocationSiteMapRootIndex);
3118 __ b(
eq, &feedback_register_initialized);
3119 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
3120 __ bind(&feedback_register_initialized);
3123 __ AssertUndefinedOrAllocationSite(
r2,
r5);
3127 Register jmp_reg =
r4;
3139 __ b(
ne, &non_function_call);
3140 __ GetBuiltinFunction(
r1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3143 __ bind(&non_function_call);
3144 __ GetBuiltinFunction(
r1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3147 __ mov(
r2, Operand::Zero());
3148 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3149 RelocInfo::CODE_TARGET);
3157 Label got_char_code;
3158 Label sliced_string;
3161 __ JumpIfSmi(object_, receiver_not_string_);
3168 __ b(
ne, receiver_not_string_);
3171 __ JumpIfNotSmi(index_, &index_not_smi_);
3172 __ bind(&got_smi_index_);
3176 __ cmp(
ip, Operand(index_));
3177 __ b(
ls, index_out_of_range_);
3179 __ SmiUntag(index_);
3193 MacroAssembler* masm,
3194 const RuntimeCallHelper& call_helper) {
3195 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3198 __ bind(&index_not_smi_);
3202 Heap::kHeapNumberMapRootIndex,
3205 call_helper.BeforeCall(masm);
3209 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3213 __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
3217 __ Move(index_,
r0);
3222 call_helper.AfterCall(masm);
3224 __ JumpIfNotSmi(index_, index_out_of_range_);
3226 __ jmp(&got_smi_index_);
3231 __ bind(&call_runtime_);
3232 call_helper.BeforeCall(masm);
3234 __ Push(object_, index_);
3235 __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
3236 __ Move(result_,
r0);
3237 call_helper.AfterCall(masm);
3240 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3255 __ b(
ne, &slow_case_);
3257 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3259 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_));
3261 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
3262 __ b(
eq, &slow_case_);
3268 MacroAssembler* masm,
3269 const RuntimeCallHelper& call_helper) {
3270 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3272 __ bind(&slow_case_);
3273 call_helper.BeforeCall(masm);
3275 __ CallRuntime(Runtime::kCharFromCode, 1);
3276 __ Move(result_,
r0);
3277 call_helper.AfterCall(masm);
3280 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3284 enum CopyCharactersFlags {
3286 DEST_ALWAYS_ALIGNED = 2
3299 bool ascii = (flags & COPY_ASCII) != 0;
3300 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
3302 if (dest_always_aligned && FLAG_debug_code) {
3306 __ Check(
eq, kDestinationOfCopyNotAligned);
3309 const int kReadAlignment = 4;
3310 const int kReadAlignmentMask = kReadAlignment - 1;
3319 __ add(count, count, Operand(count),
SetCC);
3321 __ cmp(count, Operand::Zero());
3328 __ cmp(count, Operand(8));
3329 __ add(count, dest, Operand(count));
3330 Register limit = count;
3331 __ b(
lt, &byte_loop);
3333 if (!dest_always_aligned) {
3335 __ and_(scratch4, dest, Operand(kReadAlignmentMask),
SetCC);
3337 __ b(
eq, &dest_aligned);
3338 __ cmp(scratch4, Operand(2));
3345 __ bind(&dest_aligned);
3350 __ sub(scratch4, dest, Operand(src));
3351 __ and_(scratch4, scratch4, Operand(0x03),
SetCC);
3352 __ b(
eq, &simple_loop);
3360 __ mov(scratch4, Operand(scratch4,
LSL, 3));
3361 Register left_shift = scratch4;
3362 __ and_(src, src, Operand(~3));
3366 __ rsb(scratch2, left_shift, Operand(32));
3367 Register right_shift = scratch2;
3368 __ mov(scratch1, Operand(scratch1,
LSR, right_shift));
3372 __ orr(scratch1, scratch1, Operand(scratch3,
LSL, left_shift));
3374 __ mov(scratch1, Operand(scratch3,
LSR, right_shift));
3376 __ sub(scratch3, limit, Operand(dest));
3377 __ sub(scratch3, scratch3, Operand(4),
SetCC);
3387 __ add(scratch3, scratch3, Operand(4),
SetCC);
3389 __ cmp(scratch4, Operand(scratch3,
LSL, 3),
ne);
3394 __ cmp(scratch3, Operand(2));
3406 __ bind(&simple_loop);
3411 __ sub(scratch3, limit, Operand(dest));
3415 __ cmp(scratch3, Operand(8));
3420 __ bind(&byte_loop);
3421 __ cmp(dest, Operand(limit));
                                    Register character) {
  __ LoadRoot(hash, Heap::kHashSeedRootIndex);
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));

                                            Register character) {
  __ add(hash, hash, Operand(character));
  __ add(hash, hash, Operand(hash, LSL, 10));
  __ eor(hash, hash, Operand(hash, LSR, 6));

  __ add(hash, hash, Operand(hash, LSL, 3));
  __ eor(hash, hash, Operand(hash, LSR, 11));
  __ add(hash, hash, Operand(hash, LSL, 15));
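// The string hash helpers above maintain the running hash used for the
// string hash field; in scalar form the per-character step and the final
// mixing are:
//
//   hash += character;  hash += hash << 10;  hash ^= hash >> 6;
//   ...
//   hash += hash << 3;  hash ^= hash >> 11;  hash += hash << 15;
//
// with the initial value seeded from Heap::kHashSeedRootIndex.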
3473 void SubStringStub::Generate(MacroAssembler* masm) {
3522 __ cmp(
r2, Operand(1));
3523 __ b(
eq, &single_char);
3532 __ b(
eq, &return_r0);
3543 Label underlying_unpacked, sliced_string, seq_or_external_string;
3548 __ b(
eq, &seq_or_external_string);
3551 __ b(
ne, &sliced_string);
3554 __ CompareRoot(
r5, Heap::kempty_stringRootIndex);
3560 __ jmp(&underlying_unpacked);
3562 __ bind(&sliced_string);
3570 __ jmp(&underlying_unpacked);
3572 __ bind(&seq_or_external_string);
3576 __ bind(&underlying_unpacked);
3578 if (FLAG_string_slices) {
3586 __ b(
lt, ©_routine);
3592 Label two_byte_slice, set_slice_header;
3596 __ b(
eq, &two_byte_slice);
3597 __ AllocateAsciiSlicedString(
r0,
r2,
r6,
r4, &runtime);
3598 __ jmp(&set_slice_header);
3599 __ bind(&two_byte_slice);
3600 __ AllocateTwoByteSlicedString(
r0,
r2,
r6,
r4, &runtime);
3601 __ bind(&set_slice_header);
3607 __ bind(©_routine);
3614 Label two_byte_sequential, sequential_string, allocate_result;
3618 __ b(
eq, &sequential_string);
3627 __ jmp(&allocate_result);
3629 __ bind(&sequential_string);
3634 __ bind(&allocate_result);
3638 __ b(
eq, &two_byte_sequential);
3654 COPY_ASCII | DEST_ALWAYS_ALIGNED);
3658 __ bind(&two_byte_sequential);
3659 __ AllocateTwoByteString(
r0,
r2,
r4,
r6,
r1, &runtime);
3675 __ bind(&return_r0);
3676 Counters* counters = masm->isolate()->counters();
3677 __ IncrementCounter(counters->sub_string_native(), 1,
r3,
r4);
3683 __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
3685 __ bind(&single_char);
3691 StringCharAtGenerator generator(
3693 generator.GenerateFast(masm);
3696 generator.SkipSlow(masm, &runtime);
3705 Register scratch3) {
3706 Register length = scratch1;
3709 Label strings_not_equal, check_zero_length;
3712 __ cmp(length, scratch2);
3713 __ b(
eq, &check_zero_length);
3714 __ bind(&strings_not_equal);
3719 Label compare_chars;
3720 __ bind(&check_zero_length);
3722 __ cmp(length, Operand::Zero());
3723 __ b(
ne, &compare_chars);
3728 __ bind(&compare_chars);
3729 GenerateAsciiCharsCompareLoop(masm,
3730 left, right, length, scratch2, scratch3,
3731 &strings_not_equal);
3745 Register scratch4) {
3746 Label result_not_equal, compare_lengths;
3750 __ sub(scratch3, scratch1, Operand(scratch2),
SetCC);
3751 Register length_delta = scratch3;
3753 Register min_length = scratch1;
3755 __ cmp(min_length, Operand::Zero());
3756 __ b(
eq, &compare_lengths);
3759 GenerateAsciiCharsCompareLoop(masm,
3760 left, right, min_length, scratch2, scratch4,
3764 __ bind(&compare_lengths);
3767 __ mov(
r0, Operand(length_delta),
SetCC);
3768 __ bind(&result_not_equal);
3777 void StringCompareStub::GenerateAsciiCharsCompareLoop(
3778 MacroAssembler* masm,
3784 Label* chars_not_equal) {
3788 __ SmiUntag(length);
3789 __ add(scratch1, length,
3791 __ add(left, left, Operand(scratch1));
3792 __ add(right, right, Operand(scratch1));
3793 __ rsb(length, length, Operand::Zero());
3794 Register index = length;
3801 __ cmp(scratch1, scratch2);
3802 __ b(
ne, chars_not_equal);
3803 __ add(index, index, Operand(1),
SetCC);
3808 void StringCompareStub::Generate(MacroAssembler* masm) {
3811 Counters* counters = masm->isolate()->counters();
3820 __ b(
ne, ¬_same);
3824 __ IncrementCounter(counters->string_compare_native(), 1,
r1,
r2);
3825 __ add(
sp,
sp, Operand(2 * kPointerSize));
3831 __ JumpIfNotBothSequentialAsciiStrings(
r1,
r0,
r2,
r3, &runtime);
3834 __ IncrementCounter(counters->string_compare_native(), 1,
r2,
r3);
3835 __ add(
sp,
sp, Operand(2 * kPointerSize));
3841 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
3846 Register receiver =
r0;
3847 Register scratch =
r1;
3849 int argc = arguments_count();
3859 Isolate* isolate = masm->isolate();
3862 __ TailCallExternalReference(
3863 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3867 Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3869 Register elements =
r6;
3870 Register end_elements =
r5;
3876 __ CheckMap(elements,
3878 Heap::kFixedArrayMapRootIndex,
3891 __ cmp(scratch,
r4);
3893 const int kEndElementsOffset =
3897 __ b(
gt, &attempt_to_grow_elements);
3901 __ JumpIfNotSmi(
r4, &with_write_barrier);
3906 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
3910 __ cmp(scratch,
r4);
3911 __ b(
gt, &call_builtin);
3914 __ StoreNumberToDoubleElements(
r4, scratch, elements,
r5,
d0,
3915 &call_builtin, argc * kDoubleSize);
3921 __ mov(
r0, scratch);
3925 __ bind(&call_builtin);
3926 __ TailCallExternalReference(
3927 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3931 __ bind(&with_write_barrier);
3934 if (FLAG_trace_elements_transitions)
__ jmp(&call_builtin);
3937 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
3939 __ b(
eq, &call_builtin);
3948 const int origin_offset = header_size + elements_kind() *
kPointerSize;
3952 __ b(
ne, &call_builtin);
3954 const int target_offset = header_size + target_kind *
kPointerSize;
3956 __ mov(
r2, receiver);
3967 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
3970 __ RecordWrite(elements,
3978 __ mov(
r0, scratch);
3981 __ bind(&attempt_to_grow_elements);
3984 if (!FLAG_inline_new) {
3985 __ bind(&call_builtin);
3986 __ TailCallExternalReference(
3987 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3995 __ JumpIfNotSmi(
r2, &call_builtin);
4001 ExternalReference new_space_allocation_top =
4002 ExternalReference::new_space_allocation_top_address(isolate);
4003 ExternalReference new_space_allocation_limit =
4004 ExternalReference::new_space_allocation_limit_address(isolate);
4006 const int kAllocationDelta = 4;
4007 ASSERT(kAllocationDelta >= argc);
4009 __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(scratch));
4010 __ add(end_elements, end_elements, Operand(kEndElementsOffset));
4011 __ mov(
r4, Operand(new_space_allocation_top));
4013 __ cmp(end_elements,
r3);
4014 __ b(
ne, &call_builtin);
4016 __ mov(
r9, Operand(new_space_allocation_limit));
4018 __ add(
r3,
r3, Operand(kAllocationDelta * kPointerSize));
4020 __ b(
hi, &call_builtin);
4028 __ LoadRoot(
r3, Heap::kTheHoleValueRootIndex);
4029 for (
int i = 1; i < kAllocationDelta; i++) {
4041 __ mov(
r0, scratch);
4044 __ bind(&call_builtin);
4045 __ TailCallExternalReference(
4046 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();
  __ Move(r2, handle(isolate->heap()->undefined_value()));

  if (FLAG_debug_code) {
    __ Assert(ne, kExpectedAllocationSite);
    __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex);
    __ Assert(eq, kExpectedAllocationSite);
  }

  BinaryOpWithAllocationSiteStub stub(state_);
  __ TailCallStub(&stub);
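// BinaryOpICWithAllocationSiteStub only validates its extra argument and
// delegates: in debug builds it asserts that r2 is neither undefined nor
// missing an AllocationSite map, and then it tail-calls the corresponding
// BinaryOpWithAllocationSiteStub with the same state_, which performs the
// actual operation.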
4082 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4086 __ JumpIfNotSmi(
r2, &miss);
4088 if (GetCondition() ==
eq) {
4094 __ sub(
r0,
r1, Operand::SmiUntag(
r0));
4103 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
4107 Label unordered, maybe_undefined1, maybe_undefined2;
4111 __ JumpIfNotSmi(
r1, &miss);
4114 __ JumpIfNotSmi(
r0, &miss);
4120 Label done, left, left_smi, right_smi;
4121 __ JumpIfSmi(
r0, &right_smi);
4122 __ CheckMap(
r0,
r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
4127 __ bind(&right_smi);
4131 __ JumpIfSmi(
r1, &left_smi);
4132 __ CheckMap(
r1,
r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
4142 __ VFPCompareAndSetFlags(
d0,
d1);
4145 __ b(
vs, &unordered);
4153 __ bind(&unordered);
4154 __ bind(&generic_stub);
4157 __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4159 __ bind(&maybe_undefined1);
4161 __ CompareRoot(
r0, Heap::kUndefinedValueRootIndex);
4163 __ JumpIfSmi(
r1, &unordered);
4165 __ b(
ne, &maybe_undefined2);
4169 __ bind(&maybe_undefined2);
4171 __ CompareRoot(
r1, Heap::kUndefinedValueRootIndex);
4172 __ b(
eq, &unordered);
4180 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
4186 Register right =
r0;
4191 __ JumpIfEitherSmi(left, right, &miss);
4199 __ orr(tmp1, tmp1, Operand(tmp2));
4204 __ cmp(left, right);
4218 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
4225 Register right =
r0;
4230 __ JumpIfEitherSmi(left, right, &miss);
4239 __ JumpIfNotUniqueName(tmp1, &miss);
4240 __ JumpIfNotUniqueName(tmp2, &miss);
4243 __ cmp(left, right);
4257 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
4265 Register right =
r0;
4272 __ JumpIfEitherSmi(left, right, &miss);
4281 __ orr(tmp3, tmp1, tmp2);
4286 __ cmp(left, right);
4300 __ orr(tmp3, tmp1, Operand(tmp2));
4310 __ JumpIfBothInstanceTypesAreNotSequentialAscii(
4311 tmp1, tmp2, tmp3, tmp4, &runtime);
4316 masm, left, right, tmp1, tmp2, tmp3);
4319 masm, left, right, tmp1, tmp2, tmp3, tmp4);
4324 __ Push(left, right);
4326 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4328 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4336 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4340 __ JumpIfSmi(
r2, &miss);
4356 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4359 __ JumpIfSmi(
r2, &miss);
4362 __ cmp(
r2, Operand(known_map_));
4364 __ cmp(
r3, Operand(known_map_));
void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
  __ CallExternalReference(miss, 3);

  __ VFPEnsureFPSCRState(r2);
      reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
  __ Move(ip, target);
  __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
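// DirectCEntryStub::GenerateCall moves the real C++ target into ip and
// loads lr with this stub's own code object as a RelocInfo::CODE_TARGET, so
// the external call returns through the stub rather than directly into the
// caller; the VFPEnsureFPSCRState(r2) above presumably restores the FPSCR
// configuration V8 expects after returning from external code (r2 serves as
// a scratch register there).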
4423 Register properties,
4425 Register scratch0) {
4426 ASSERT(name->IsUniqueName());
4432 for (
int i = 0; i < kInlinedProbes; i++) {
4435 Register index = scratch0;
4438 __ sub(index, index, Operand(1));
4439 __ and_(index, index, Operand(
4440 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
4444 __ add(index, index, Operand(index,
LSL, 1));
4446 Register entity_name = scratch0;
4449 Register tmp = properties;
4450 __ add(tmp, properties, Operand(index,
LSL, 1));
4453 ASSERT(!tmp.is(entity_name));
4454 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
4455 __ cmp(entity_name, tmp);
4459 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
4462 __ cmp(entity_name, Operand(Handle<Name>(name)));
4466 __ cmp(entity_name, tmp);
4471 __ ldrb(entity_name,
4473 __ JumpIfNotUniqueName(entity_name, miss);
4481 const int spill_mask =
4487 __ mov(
r1, Operand(Handle<Name>(name)));
4490 __ cmp(
r0, Operand::Zero());
4508 Register scratch2) {
4509 ASSERT(!elements.is(scratch1));
4510 ASSERT(!elements.is(scratch2));
4511 ASSERT(!name.is(scratch1));
4512 ASSERT(!name.is(scratch2));
4514 __ AssertName(name);
4518 __ SmiUntag(scratch1);
4519 __ sub(scratch1, scratch1, Operand(1));
4524 for (
int i = 0; i < kInlinedProbes; i++) {
4531 ASSERT(NameDictionary::GetProbeOffset(i) <
4533 __ add(scratch2, scratch2, Operand(
4541 __ add(scratch2, scratch2, Operand(scratch2,
LSL, 1));
4544 __ add(scratch2, elements, Operand(scratch2,
LSL, 2));
4546 __ cmp(name, Operand(
ip));
4550 const int spill_mask =
4553 ~(scratch1.bit() | scratch2.bit());
4559 __ Move(
r0, elements);
4561 __ Move(
r0, elements);
4566 __ cmp(
r0, Operand::Zero());
4567 __ mov(scratch2, Operand(
r2));
4587 Register result =
r0;
4588 Register dictionary =
r0;
4590 Register index =
r2;
4593 Register undefined =
r5;
4594 Register entry_key =
r6;
4596 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4600 __ sub(mask, mask, Operand(1));
4604 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
4606 for (
int i = kInlinedProbes; i < kTotalProbes; i++) {
4613 ASSERT(NameDictionary::GetProbeOffset(i) <
4615 __ add(index, hash, Operand(
4618 __ mov(index, Operand(hash));
4624 __ add(index, index, Operand(index,
LSL, 1));
4627 __ add(index, dictionary, Operand(index,
LSL, 2));
4631 __ cmp(entry_key, Operand(undefined));
4632 __ b(
eq, ¬_in_dictionary);
4635 __ cmp(entry_key, Operand(key));
4636 __ b(
eq, &in_dictionary);
4643 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
4647 __ bind(&maybe_in_dictionary);
4652 __ mov(result, Operand::Zero());
4656 __ bind(&in_dictionary);
4657 __ mov(result, Operand(1));
4660 __ bind(¬_in_dictionary);
4661 __ mov(result, Operand::Zero());
4669 stub1.GetCode(isolate);
4672 stub2.GetCode(isolate);
4676 bool CodeStub::CanUseFPRegisters() {
4685 void RecordWriteStub::Generate(MacroAssembler* masm) {
4686 Label skip_to_incremental_noncompacting;
4687 Label skip_to_incremental_compacting;
4698 __ b(&skip_to_incremental_noncompacting);
4699 __ b(&skip_to_incremental_compacting);
4703 __ RememberedSetHelper(object_,
4711 __ bind(&skip_to_incremental_noncompacting);
4714 __ bind(&skip_to_incremental_compacting);
4726 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4730 Label dont_need_remembered_set;
4732 __ ldr(regs_.scratch0(),
MemOperand(regs_.address(), 0));
4733 __ JumpIfNotInNewSpace(regs_.scratch0(),
4735 &dont_need_remembered_set);
4737 __ CheckPageFlag(regs_.object(),
4741 &dont_need_remembered_set);
4745 CheckNeedsToInformIncrementalMarker(
4746 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
4747 InformIncrementalMarker(masm);
4748 regs_.Restore(masm);
4749 __ RememberedSetHelper(object_,
4755 __ bind(&dont_need_remembered_set);
4758 CheckNeedsToInformIncrementalMarker(
4759 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
4760 InformIncrementalMarker(masm);
4761 regs_.Restore(masm);
4766 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4767 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4768 int argument_count = 3;
4769 __ PrepareCallCFunction(argument_count, regs_.scratch0());
4771 r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
4772 ASSERT(!address.is(regs_.object()));
4774 __ Move(address, regs_.address());
4775 __ Move(r0, regs_.object());
4776 __ Move(r1, address);
4777 __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
4779 AllowExternalCallThatCantCauseGC scope(masm);
4781 ExternalReference::incremental_marking_record_write_function(
4784 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
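// CheckNeedsToInformIncrementalMarker: decrements the page's write-barrier
// counter and jumps to need_incremental when it underflows; otherwise it skips
// the marking work if the object is already black or the value is already marked.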
4788 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4789 MacroAssembler* masm,
4790 OnNoNeedToInformIncrementalMarker on_no_need,
4793 Label need_incremental;
4794 Label need_incremental_pop_scratch;
4797 __ ldr(regs_.scratch1(),
4800 __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
4801 __ str(regs_.scratch1(),
4804 __ b(mi, &need_incremental);
4808 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
4810 regs_.Restore(masm);
4811 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4812 __ RememberedSetHelper(object_,
4824 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
4827 Label ensure_not_white;
4829 __ CheckPageFlag(regs_.scratch0(),
4835 __ CheckPageFlag(regs_.object(),
4841 __ bind(&ensure_not_white);
4846 __ Push(regs_.object(), regs_.address());
4847 __ EnsureNotWhite(regs_.scratch0(),
4851 &need_incremental_pop_scratch);
4852 __ Pop(regs_.object(), regs_.address());
4854 regs_.Restore(masm);
4855 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4856 __ RememberedSetHelper(object_,
4865 __ bind(&need_incremental_pop_scratch);
4866 __ Pop(regs_.object(), regs_.address());
4868 __ bind(&need_incremental);
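// StoreArrayLiteralElementStub::Generate (excerpted below): dispatches on the
// elements kind -- fast tagged, fast smi-only, or double -- and stores the value
// accordingly, tail-calling the runtime for the slow case.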
4874 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4884 Label double_elements;
4886 Label slow_elements;
4887 Label fast_elements;
4894 __ CheckFastElements(r2, r5, &double_elements);
4896 __ JumpIfSmi(r0, &smi_element);
4897 __ CheckFastSmiElements(r2, r5, &fast_elements);
4901 __ bind(&slow_elements);
4907 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4910 __ bind(&fast_elements);
4912 __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
4922 __ bind(&smi_element);
4924 __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
4929 __ bind(&double_elements);
4931 __ StoreNumberToDoubleElements(r0, r3, r5, r6, d0, &slow_elements);
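// StubFailureTrampolineStub::Generate: calls the CEntry stub, reloads the
// caller's argument count from the stub-failure frame, and leaves the frame
// before returning.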
4936 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4938 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4939 int parameter_count_offset =
4943 __ add(r1, r1, Operand(1));
4945 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4953 if (masm->isolate()->function_entry_hook() != NULL) {
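// ProfileEntryHookStub::Generate (excerpted below): computes the function's
// entry address from lr, passes it together with the caller's stack pointer to
// the registered entry hook (called directly on ARM hardware, via a dispatcher
// otherwise).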
4963 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4965 const int32_t kReturnAddressDistanceFromFunctionStart =
4977 const int32_t kNumSavedRegs = 7;
4985 __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));
4989 __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));
4992 int frame_alignment = masm->ActivationFrameAlignment();
4993 if (frame_alignment > kPointerSize) {
4996 __ and_(sp, sp, Operand(-frame_alignment));
4999 #if V8_HOST_ARCH_ARM
5001 reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
5002 __ mov(ip, Operand(entry_hook));
5007 __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
5010 __ mov(ip, Operand(ExternalReference(&dispatcher,
5011 ExternalReference::BUILTIN_CALL,
5017 if (frame_alignment > kPointerSize) {
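// CreateArrayDispatch (excerpted below): either tail-calls a single stub
// directly or compares the elements kind in r3 against each candidate kind and
// tail-calls the matching constructor stub, aborting if none matches.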
5027 static void CreateArrayDispatch(MacroAssembler* masm,
5031 __ TailCallStub(&stub);
5035 for (int i = 0; i <= last_index; ++i) {
5037 __ cmp(r3, Operand(kind));
5039 __ TailCallStub(&stub, eq);
5043 __ Abort(kUnexpectedElementsKindInArrayConstructor);
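// CreateArrayDispatchOneArgument (excerpted below): handles the single-argument
// Array constructor; a non-zero length argument takes the holey variant of the
// stub, otherwise the normal sequence is used.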
5050 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5057 Label normal_sequence;
5067 __ tst(r3, Operand(1));
5068 __ b(ne, &normal_sequence);
5073 __ cmp(r5, Operand::Zero());
5074 __ b(eq, &normal_sequence);
5080 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
5082 __ TailCallStub(&stub_holey);
5084 __ bind(&normal_sequence);
5085 ArraySingleArgumentConstructorStub stub(initial,
5087 __ TailCallStub(&stub);
5091 __ add(r3, r3, Operand(1));
5093 if (FLAG_debug_code) {
5095 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
5096 __ Assert(eq, kExpectedAllocationSite);
5107 __ bind(&normal_sequence);
5110 for (int i = 0; i <= last_index; ++i) {
5112 __ cmp(r3, Operand(kind));
5113 ArraySingleArgumentConstructorStub stub(kind);
5114 __ TailCallStub(&stub, eq);
5118 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5126 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
5129 for (int i = 0; i <= to_index; ++i) {
5132 stub.GetCode(isolate);
5135 stub1.GetCode(isolate);
5142 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
5144 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
5146 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
5154 for (int i = 0; i < 2; i++) {
5156 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
5157 stubh1.GetCode(isolate);
5158 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
5159 stubh2.GetCode(isolate);
5160 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
5161 stubh3.GetCode(isolate);
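// ArrayConstructorStub::GenerateDispatchToArrayStub (excerpted below): selects
// the no-argument, single-argument, or N-argument constructor stub based on
// argument_count_ and, for ANY, on the actual argc in r0.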
5166 void ArrayConstructorStub::GenerateDispatchToArrayStub(
5167 MacroAssembler* masm,
5169 if (argument_count_ == ANY) {
5170 Label not_zero_case, not_one_case;
5172 __ b(ne, &not_zero_case);
5173 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5175 __ bind(&not_zero_case);
5176 __ cmp(r0, Operand(1));
5177 __ b(gt, &not_one_case);
5178 CreateArrayDispatchOneArgument(masm, mode);
5180 __ bind(&not_one_case);
5181 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5182 } else if (argument_count_ == NONE) {
5183 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
5184 } else if (argument_count_ == ONE) {
5185 CreateArrayDispatchOneArgument(masm, mode);
5187 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5203 if (FLAG_debug_code) {
5211 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
5213 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
5216 __ AssertUndefinedOrAllocationSite(r2, r4);
5221 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
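// InternalArrayConstructorStub::GenerateCase (excerpted below): dispatches on
// argc in r0 -- zero, one (using a holey variant when the single argument is
// non-zero), or more than one argument.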
5235 void InternalArrayConstructorStub::GenerateCase(
5237 __ cmp(r0, Operand(1));
5239 InternalArrayNoArgumentConstructorStub stub0(kind);
5240 __ TailCallStub(&stub0, lo);
5242 InternalArrayNArgumentsConstructorStub stubN(kind);
5243 __ TailCallStub(&stubN, hi);
5249 __ cmp(r3, Operand::Zero());
5251 InternalArraySingleArgumentConstructorStub
5253 __ TailCallStub(&stub1_holey, ne);
5256 InternalArraySingleArgumentConstructorStub stub1(kind);
5257 __ TailCallStub(&stub1);
5269 if (FLAG_debug_code) {
5277 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
5279 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
5290 if (FLAG_debug_code) {
5296 kInvalidElementsKindForInternalArrayOrInternalPackedArray);
5300 Label fast_elements_case;
5302 __ b(eq, &fast_elements_case);
5305 __ bind(&fast_elements_case);
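// CallApiFunctionStub::Generate (excerpted below): sets up the implicit
// FunctionCallbackInfo arguments on the stack, enters an exit frame, and invokes
// the API callback through CallApiFunctionAndReturn.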
5310 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5324 Register callee = r0;
5325 Register call_data = r4;
5326 Register holder = r2;
5327 Register api_function_address = r1;
5328 Register context = cp;
5330 int argc = ArgumentBits::decode(bit_field_);
5331 bool is_store = IsStoreBits::decode(bit_field_);
5332 bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5334 typedef FunctionCallbackArguments FCA;
5345 Isolate* isolate = masm->isolate();
5358 Register scratch = call_data;
5359 if (!call_data_undefined) {
5360 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5368 Operand(ExternalReference::isolate_address(isolate)));
5374 __ mov(scratch, sp);
5378 const int kApiStackSpace = 4;
5380 FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL);
5381 __ EnterExitFrame(false, kApiStackSpace);
5383 ASSERT(!api_function_address.is(r0) && !scratch.is(r0));
5386 __ add(r0, sp, Operand(1 * kPointerSize));
5390 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
5393 __ mov(ip, Operand(argc));
5396 __ mov(ip, Operand::Zero());
5399 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
5402 ApiFunction thunk_fun(thunk_address);
5403 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5406 AllowExternalCallThatCantCauseGC scope(masm);
5408 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5410 int return_value_offset = 0;
5412 return_value_offset = 2 + FCA::kArgsLength;
5414 return_value_offset = 2 + FCA::kReturnValueOffset;
5416 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5418 __ CallApiFunctionAndReturn(api_function_address,
5421 return_value_operand,
5422 &context_restore_operand);
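// CallApiGetterStub::Generate (excerpted below): builds the PropertyCallbackInfo
// arguments on the stack, enters an exit frame, and calls the accessor getter
// through the profiling thunk.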
5426 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5434 Register api_function_address = r2;
5437 __ add(r1, r0, Operand(1 * kPointerSize));
5439 const int kApiStackSpace = 1;
5440 FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL);
5441 __ EnterExitFrame(false, kApiStackSpace);
5446 __ add(r1, sp, Operand(1 * kPointerSize));
5452 ExternalReference::PROFILING_GETTER_CALL;
5453 ApiFunction thunk_fun(thunk_address);
5454 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5456 __ CallApiFunctionAndReturn(api_function_address,
5468 #endif // V8_TARGET_ARCH_ARM