#if V8_TARGET_ARCH_ARM64

// ...

#define __ ACCESS_MASM(masm)
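
// `__` is V8's usual code-generation shorthand: ACCESS_MASM(masm) expands to
// `masm->`, so `__ Add(...)` emits code through the MacroAssembler passed to
// each generator in this file.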
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // ... (body elided)
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // ... (body elided)
}
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ...
  int num_extra_args = 0;
  // ... (push the extra arguments, if any)

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
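
// Entry point used when the InternalArray function is called as a plain
// function: the debug-mode checks below verify that its initial map really is
// a Map, then InternalArrayConstructorStub does the actual construction.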
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ...
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    // ... (load of the initial map into x10 and its smi check elided)
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
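
// Generate_ArrayCode mirrors the InternalArray entry point above for the
// public Array constructor; the only material difference is that undefined is
// loaded into x2 as the AllocationSite argument of ArrayConstructorStub.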
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ...
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    // ... (load of the initial map into x10 and its smi check elided)
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
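
// Generate_StringConstructCode implements `String(value)` when called as a
// function rather than as a constructor: it fast-paths numbers found in the
// number-to-string cache and values that are already strings, converts
// everything else via the string conversion builtin, and returns a fresh
// JSValue wrapper around the resulting string.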
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ...
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11);

  Register argc = x0;
  Register function = x1;
  if (FLAG_debug_code) {
    // ... (load of the expected String function into x10 elided)
    __ Cmp(function, x10);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into x0 and get rid of the rest.
  Label no_arguments;
  __ Cbz(argc, &no_arguments);
  __ Sub(argc, argc, 1);
  // ... (the first argument is loaded into `arg` and the stack is trimmed;
  //      `arg` aliases x0 from here on)
  Register arg = x0;

  Register argument = x2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(arg,        // Input.
                             argument,   // Result.
                             x10,        // Scratch.
                             x11,        // Scratch.
                             x12,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11);
  __ Bind(&argument_is_string);

  // Allocate a JSValue and put the tagged pointer into x0.
  Label gc_required;
  Register new_obj = x0;
  // ... (the allocation, bailing out to &gc_required on failure)

  // Initialize the String object.
  Register map = x3;
  __ LoadGlobalFunctionInitialMap(function, map, x10);
  if (FLAG_debug_code) {
    // ... (instance size check elided)
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    // ... (unused property fields check elided)
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }

  Register empty = x3;
  __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
  // ... (stores of the map, properties, elements and value, then return)

  // The argument was not found in the number-to-string cache. Check if it is
  // a string already before calling the conversion builtin.
  Label convert_argument;
  __ Bind(&not_cached);
  __ JumpIfSmi(arg, &convert_argument);
  // ... (instance type check elided)
  __ Mov(argument, arg);
  __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
  __ B(&argument_is_string);

  // Invoke the conversion builtin and put the result into x2.
  __ Bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
  // ... (string conversion builtin call elided)
  __ Mov(argument, x0);
  __ B(&argument_is_string);

  // Load the empty string into x2, remove the receiver from the stack, and
  // jump back to the case where the argument is a string.
  __ Bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  // ...
  __ B(&argument_is_string);

  // At this point the argument is already a string. Call the runtime to
  // create a string wrapper.
  __ Bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11);
  // ...
  __ CallRuntime(Runtime::kNewStringWrapper, 1);
  __ Ret();
}
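
// The helpers below implement the shared "call into the runtime with the
// function from x1, then tail-call either the code object it returns or the
// code recorded on the SharedFunctionInfo" pattern used by the lazy
// compilation and optimization builtins.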
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // ... (push one copy of the function to preserve it, one as the argument)
  __ CallRuntime(function_id, 1);
  // ... (restore the function register)
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  // ... (load the code entry from the SharedFunctionInfo and jump to it)
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  // ... (jump to the entry of the code object returned in x0)
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
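
// Generate_JSConstructStubHelper emits the generic construct stub shared by
// the three Generate_JSConstructStub* builtins below. Its fast path allocates
// the receiver inline from the constructor's initial map (optionally
// appending an AllocationMemento); when inlined allocation is disabled or
// fails, it falls back to the runtime, then copies the arguments and invokes
// the constructor.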
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  // ...
  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);
  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);
  // Should never create mementos before slack tracking is finished.
  ASSERT(!count_constructions || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(x2, x10);
      // ... (preserve the allocation site)
    }

    Register argc = x0;
    Register constructor = x1;
    // Preserve the incoming parameters on the stack.
    // ...
    __ Push(argc, constructor);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#if ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);
#endif
      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      // ...
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);
      // ... (bail out to the runtime for JS_FUNCTION_TYPE instances)

      if (count_constructions) {
        Label allocate;
        // Decrement the in-progress construction count; once it hits zero,
        // ask the runtime to finalize the instance size.
        __ Ldr(x3, FieldMemOperand(constructor,
                                   JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset);
        __ Ldrb(x4, constructor_count);
        __ Subs(x4, x4, 1);
        __ Strb(x4, constructor_count);
        __ B(ne, &allocate);

        // Push the constructor and map to the stack, and the constructor
        // again as an argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      Register obj_size = x3;
      Register new_obj = x4;
      // ... (load the instance size from the map)
      if (create_memento) {
        // ... (add the AllocationMemento size)
      }
      // ... (the Allocate() call, bailing out to &rt_call on failure)

      // Allocated the JSObject, now initialize the fields. Map is set to the
      // initial map, and properties and elements are set to the empty fixed
      // array.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      // ... (store map, properties and elements)

      Register first_prop = x5;
      // ... (first_prop = address of the first in-object property)

      // Fill all of the in-object properties with the appropriate filler.
      Register undef = x7;
      __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);

      // Obtain the number of pre-allocated property fields and in-object
      // properties.
      Register prealloc_fields = x10;
      Register inobject_props = x11;
      Register inst_sizes = x11;
      __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(prealloc_fields, inst_sizes,
              Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ Ubfx(inobject_props, inst_sizes,
              Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);

      // Calculate the number of property fields in the object.
      Register prop_fields = x6;
      // ...

      if (count_constructions) {
        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prealloc_fields, undef);

        // Address of the first field after the pre-allocated fields.
        Register first_non_prealloc = x12;
        __ Add(first_non_prealloc, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        if (FLAG_debug_code) {
          Register obj_end = x5;
          // ... (obj_end = new_obj + obj_size)
          __ Cmp(first_non_prealloc, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with the one-pointer filler map.
        Register one_pointer_filler = x5;
        Register non_prealloc_fields = x6;
        __ LoadRoot(one_pointer_filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(non_prealloc_fields, prop_fields, prealloc_fields);
        __ FillFields(first_non_prealloc, non_prealloc_fields,
                      one_pointer_filler);
      } else if (create_memento) {
        // Fill the property fields with undef, then initialize the
        // AllocationMemento placed right after the object.
        __ FillFields(first_prop, prop_fields, undef);
        // ...
        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
        // ... (store the memento map and the allocation site)
      } else {
        // Fill all of the property fields with undef.
        __ FillFields(first_prop, prop_fields, undef);
      }

      // ... (add the heap object tag to new_obj)

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not.
      Register element_count = x3;
      __ Ldrb(element_count,
              FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ Add(element_count, element_count, prealloc_fields);
      __ Subs(element_count, element_count, inobject_props);

      // Done if no extra properties are to be allocated.
      __ B(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start-of-next-object calculation from above.
      Register new_array = x5;
      Register array_size = x6;
      // ...
      __ Allocate(array_size, new_array, x11, x12, &undo_allocation,
                  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP |
                                               SIZE_IN_WORDS));

      // Initialize the new FixedArray.
      Register array_map = x10;
      __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex);
      // ... (store the map)
      __ SmiTag(x0, element_count);
      // ... (store the length)

      // Initialize the fields to undefined.
      Register elements = x10;
      // ... (elements = start of the FixedArray payload)
      __ FillFields(elements, element_count, undef);

      // ... (store the properties array in the JSObject and continue with the
      //      allocated object)

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      __ Bind(&undo_allocation);
      __ UndoAllocationInNewSpace(new_obj, x14);
    }

    // Allocate the new receiver object using the runtime call.
    __ Bind(&rt_call);
    Label count_incremented;
    if (create_memento) {
      // ... (fetch the allocation site saved above and push it)
      __ Push(constructor);
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
      // ...
      // If we ended up using the runtime, then the runtime call also created
      // the memento for us, so we shouldn't increment the create count here.
      __ jmp(&count_incremented);
    } else {
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
      // ...
    }

    // Receiver for the constructor call allocated.
    __ Bind(&allocated);

    if (create_memento) {
      // ...
      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
      // ... (increment the AllocationSite's pretenure create count)
    }

    __ bind(&count_incremented);

    // ... (push the allocated receiver twice and reload the arguments)

    // Copy arguments and receiver to the expression stack.
    Label loop, entry, done_copying_arguments;
    // ...
    __ B(eq, &done_copying_arguments);
    // ... (copy loop)
    __ Bind(&done_copying_arguments);

    // Call the function.
    if (is_api_function) {
      // ... (load the context from the constructor)
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store the offset of the return address for the deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // ... (restore the context from the frame)

    // If the result is an object (in the ECMA sense), we should get rid of
    // the receiver and use the result.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(x0, &use_receiver);

    // ... (keep the result and jump to &exit if it is at least a spec object)

    // Throw away the result of the constructor invocation and use the
    // receiver on the stack as the result.
    __ Bind(&use_receiver);
    // ...

    // ... (&exit: remove the receiver from the stack and leave the construct
    //      frame)
  }

  // ... (remove the caller arguments and the receiver from the stack)
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false,
                                 FLAG_pretenuring_call_new);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
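
// Generate_JSEntryTrampolineHelper is the code reached from the JSEntryStub
// when C++ calls into JavaScript: it initializes the root register and the
// JS callee-saved registers, pushes the function and receiver, copies the
// arguments onto the stack, and then either calls or constructs depending on
// is_construct.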
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // ...
  Register function = x1;
  Register receiver = x2;
  // ...

  // Initialize the root array register.
  __ InitializeRootRegister();

  // ... (enter an internal frame and set up the context)
  __ Push(function, receiver);
  // ... (copy the arguments from the argv array onto the stack)

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
  // ... (copy x19 into the remaining JS callee-saved registers)

  // Invoke the code.
  if (is_construct) {
    // No type feedback cell is available.
    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
    // ... (call the construct stub)
  } else {
    ParameterCount actual(x0);
    // ... (invoke the function)
  }
  // ... (leave the internal frame and return)
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve the function, and push it plus the concurrency flag as the
  // arguments to the runtime call.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);

  // Restore the function.
  __ Pop(function);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
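
// The code-age builtins below are invoked from the patched prologue of code
// whose age sequence has been rewritten: they call out to C++ to rejuvenate
// (or mark as executed) the calling code object, then resume it past the
// code-age sequence.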
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // ... (save the live registers and compute the address of the code-age
  //      sequence in x0)
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  // ... (restore the registers and jump back into the rejuvenated code)
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // ... (as above, but the C++ callee marks the code as executed once)
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
  }
  // ... (restore the registers)

  // Perform prologue operations usually performed by the young code stub.
  __ EmitFrameSetupForCodeAgePatching(masm);

  // Jump to the point in the code-age sequence after the prologue.
  __ Add(x0, x0, kCodeAgeSequenceSize);
  __ Br(x0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve registers across the notification; this is important for
    // compiled stubs that tail-call the runtime on deopts, passing their
    // parameters in registers.
    // ...
    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
    // ... (restore the registers)
  }
  // ... (drop the state pushed by the deoptimizer and jump to the miss
  //      handler)
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    // ...
    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
  }

  // Switch on the full-codegen state left on the stack by the deoptimizer.
  Label with_tos_register, unknown_state;
  // ... (NO_REGISTERS: drop the state and return)
  __ Bind(&with_tos_register);
  // ... (TOS_REG: reload the top-of-stack register, drop and return)
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
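
// On-stack replacement: Generate_OnStackReplacement asks the runtime for
// optimized code for the function in the current JavaScript frame and, if it
// got some, "returns" into it at the OSR entry offset recorded in the code's
// deoptimization data. Generate_OsrAfterStackCheck only reaches it after a
// stack-guard check.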
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // ... (look up the function in the JavaScript frame)
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function as the argument to the compilation request.
    // ...
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the runtime did not produce optimized code, just return to the
  // unoptimized caller; otherwise compute the OSR entry point inside the
  // returned code object from its deoptimization data and jump there.
  // ...
}

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}
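
// Generate_FunctionCall is the generic call path used when a value is called
// as a function: it classifies the callee as JS function, function proxy, or
// non-function, patches the receiver where sloppy-mode semantics require it,
// shifts the arguments down one slot, and dispatches either directly or
// through the CALL_FUNCTION_PROXY / CALL_NON_FUNCTION builtins and the
// arguments adaptor.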
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  enum {
    call_type_JS_func = 0,
    call_type_func_proxy = 1,
    call_type_non_func = 2
  };

  Register argc = x0;
  Register function = x1;
  Register call_type = x4;
  Register scratch1 = x10;
  Register scratch2 = x11;
  Register receiver_type = x13;

  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    // ... (push undefined as the receiver and bump argc)
    __ Bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack and
  //    check if it is a function.
  Label slow, non_function;
  // ...
  __ JumpIfSmi(function, &non_function);
  __ JumpIfNotObjectType(function, scratch1, receiver_type,
                         JS_FUNCTION_TYPE, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Mov(call_type, static_cast<int>(call_type_JS_func));
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // ...

    // Do not transform the receiver for strict-mode functions and natives.
    // ...
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAnySet(
        scratch2.W(),
        (1 << SharedFunctionInfo::kStrictModeFunction) |
        (1 << SharedFunctionInfo::kNative),
        &shift_arguments);

    // Compute the receiver in sloppy mode.
    Register receiver = x2;
    __ Sub(scratch1, argc, 1);
    // ... (load the receiver from the stack)

    __ JumpIfSmi(receiver, &convert_to_object);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);

    // ...
    __ JumpIfObjectType(receiver, scratch1, scratch2,
                        FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge);

    __ Bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve the argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      // ...
      __ Push(argc, receiver);
      // ... (call the object conversion builtin)
      __ Mov(receiver, x0);
      // ... (restore argc and leave the internal frame)
    }

    // ... (restore the function and reset the call type)
    __ Mov(call_type, static_cast<int>(call_type_JS_func));
    __ B(&patch_receiver);

    __ Bind(&use_global_receiver);
    // ... (load the global receiver object into `receiver`)

    __ Bind(&patch_receiver);
    __ Sub(scratch1, argc, 1);
    // ... (store the patched receiver back on the stack)

    __ B(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ Bind(&slow);
  __ Mov(call_type, static_cast<int>(call_type_func_proxy));
  // ... (compare receiver_type against JS_FUNCTION_PROXY_TYPE)
  __ B(eq, &shift_arguments);
  __ Bind(&non_function);
  __ Mov(call_type, static_cast<int>(call_type_non_func));

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as the
  //     receiver, so overwrite the first argument, which will ultimately
  //     become the receiver.
  __ Sub(scratch1, argc, 1);
  // ...

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver), then adjust the argument count.
  __ Bind(&shift_arguments);
  { Label loop;
    // ... (compute the copy start address)
    __ Bind(&loop);
    // ... (copy each argument one slot down)
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 5a. Call a non-function via the CALL_NON_FUNCTION builtin, or a function
  //     proxy via the CALL_FUNCTION_PROXY builtin.
  { Label js_function, non_proxy;
    __ Cbz(call_type, &js_function);
    // ... (the expected number of arguments is zero for these builtins)
    __ Cmp(call_type, static_cast<int>(call_type_func_proxy));
    __ B(ne, &non_proxy);

    // ... (re-add the proxy as an additional argument)
    __ Add(argc, argc, 1);
    __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&non_proxy);
    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&js_function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If not, go through
  //     the arguments adaptor; otherwise tail-call the code in x3 directly.
  // ...
  Label dont_adapt_args;
  // ... (compare the expected and actual argument counts)
  __ B(eq, &dont_adapt_args);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ Bind(&dont_adapt_args);

  ParameterCount expected(0);
  __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
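
// Generate_FunctionApply implements Function.prototype.apply: after checking
// that the unpacked arguments fit on the stack, it normalizes the receiver
// just like Generate_FunctionCall, pushes each element of the arguments array
// via Runtime::kGetProperty, and finally invokes the target function (or the
// proxy builtin).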
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  // ... (offsets of the function, receiver and arguments in the caller frame)

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;

    // Get the length of the arguments via a builtin call.
    // ... (load the function and the arguments array from the caller frame)
    __ Push(function, args);
    // ... (the builtin leaves the argument count in x0)
    Register argc = x0;

    // Check the stack for overflow. We are not trying to catch interruptions
    // (e.g. debug break and preemption) here, so the "real stack limit" is
    // checked.
    Label enough_stack_space;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here, which will cause x10 to become negative.
    __ Sub(x10, jssp, x10);
    // Check if the arguments will overflow the stack.
    // ...
    __ B(gt, &enough_stack_space);
    // There is not enough stack space, so use a builtin to throw an
    // appropriate error.
    __ Push(function, argc);
    // ... (invoke the overflow builtin)
    // We should never return from that builtin.
    if (__ emit_debug_code()) {
      __ Unreachable();
    }

    __ Bind(&enough_stack_space);
    // ... (push the current index and the limit, both as smis, and load the
    //      receiver)

    // Check that the function is a JS function; otherwise it must be a
    // proxy, handled at &call_proxy below.
    Label push_receiver;
    // ...

    // Compute the receiver in sloppy mode (strict-mode functions and natives
    // skip the conversion).
    Label convert_receiver_to_object, use_global_receiver;
    // ...

    // Check if the receiver is already a JavaScript object.
    __ JumpIfSmi(receiver, &convert_receiver_to_object);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);
    __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE,
                        &push_receiver, ge);

    // Call a builtin to convert the receiver to a regular object.
    __ Bind(&convert_receiver_to_object);
    // ...
    __ Mov(receiver, x0);
    __ B(&push_receiver);

    __ Bind(&use_global_receiver);
    // ... (load the global receiver object)

    // Push the receiver.
    __ Bind(&push_receiver);
    // ...

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    Register current = x0;
    // ... (load the current index and jump to &entry)

    __ Bind(&loop);
    // Load the current argument from the arguments array and push it.
    // ...
    __ Push(x1, current);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    // ... (push the result and advance the index)

    __ Bind(&entry);
    // ... (load the limit into x1 and compare)
    __ Cmp(current, x1);
    __ B(ne, &loop);

    // At the end of the loop, the number of arguments is stored in current,
    // represented as a smi.
    Label call_proxy;
    ParameterCount actual(current);
    __ SmiUntag(current);
    // ... (non-functions are dispatched through &call_proxy)
    __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    frame_scope.GenerateLeaveFrame();
    // ... (drop the original apply arguments and return)

    // Call the function proxy.
    __ Bind(&call_proxy);
    // ... (re-add the proxy as an additional argument and bump argc)
    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // ... (drop the apply frame slots and return)
  }
}
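
// The arguments adaptor runs when the actual argument count differs from the
// callee's formal parameter count: the two helpers below bracket an
// arguments-adaptor frame, and the trampoline copies the actual arguments
// into it, padding with undefined when too few were passed, before calling
// the function's code entry in x3.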
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ... (set up fp/lr, the ARGUMENTS_ADAPTOR frame-type marker in x11, and
  //      the smi-tagged argument count in x10)
  __ Push(x11, x1, x10);
  // ...
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ... (restore fp/lr and drop the arguments, whose smi-tagged count is
  //      stored in the frame)
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ...
  Register argc_actual = x0;
  Register argc_expected = x2;
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  // ... (compare argc_expected against kDontAdaptArgumentsSentinel)
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected.
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // ... (compute copy_start from the caller's frame)
    __ Sub(copy_end, copy_start, argc_expected);
    // ...
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // ...
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);
    // ...
    __ B(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ Bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // ... (compute copy_from and copy_end from the caller's frame)
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    // ...
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // ...
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    // ... (recompute copy_end as the fill limit)

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);
    // ... (correct the space claimed for the extra slot)
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Call(code_entry);

  // Store the offset of the return address for the deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit the frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);
}
#endif  // V8_TARGET_ARCH_ARM64