#if V8_TARGET_ARCH_MIPS

#define __ ACCESS_MASM(masm)
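
// Generate_Adaptor builds the trampoline from a JS call site into a C++
// builtin: a0 holds the argument count, so s0 receives argc plus the
// receiver (and any extra arguments) before the jump to the builtin's
// ExternalReference.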
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  int num_extra_args = 0;
  __ Addu(s0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
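
// The two helpers below load the InternalArray / Array function for the
// current native context into the |result| register.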
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {

static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
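
// Builtin entry for "new InternalArray(...)": a0 holds the argument count
// and a1 receives the InternalArray function; the actual construction is
// tail-called into InternalArrayConstructorStub.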
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
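
// Builtin entry for "new Array(...)": same shape as the InternalArray case
// above, but it also loads undefined into a2 (no AllocationSite feedback)
// before tail-calling ArrayConstructorStub.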
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  Label generic_array_code;

  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
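
// Builtin for "new String(value)": looks the argument up in the
// number-to-string cache, converts non-string arguments via the conversion
// builtin, and finally allocates a JSValue wrapper (falling back to
// Runtime::kNewStringWrapper when inline allocation needs a GC).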
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  __ Subu(a0, a0, Operand(1));

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);

  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Branch(&argument_is_string);

  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  __ CallRuntime(Runtime::kNewStringWrapper, 1);
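
// Helpers shared by the compile/optimize builtins below: call a runtime
// function with the JSFunction in a1 as its argument, then tail-call either
// the code object returned by the runtime call or the function's shared
// (unoptimized) code.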
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  __ CallRuntime(function_id, 1);
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  GenerateTailCallToSharedCode(masm);
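
// Common code for the construct stubs. a0 holds the argument count, a1 the
// constructor function and (when create_memento is set) a2 the
// AllocationSite. The fast path tries to allocate the new object inline;
// the slow path falls back to Runtime::kHiddenNewObject or
// kHiddenNewObjectWithAllocationSite.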
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  ASSERT(!is_api_function || !count_constructions);
  ASSERT(!is_api_function || !create_memento);
  ASSERT(!count_constructions || !create_memento);

  Isolate* isolate = masm->isolate();
  FrameScope scope(masm, StackFrame::CONSTRUCT);

  if (create_memento) {
    __ AssertUndefinedOrAllocationSite(a2, a3);

  __ MultiPushReversed(a0.bit() | a1.bit());

  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(isolate);
    __ li(a2, Operand(debug_step_in_fp));
    __ Branch(&rt_call, ne, a2, Operand(zero_reg));

    __ JumpIfSmi(a2, &rt_call);
    __ GetObjectType(a2, a3, t4);

    if (count_constructions) {
      __ lbu(t0, constructor_count);
      __ Subu(t0, t0, Operand(1));
      __ sb(t0, constructor_count);
      __ Branch(&allocate, ne, t0, Operand(zero_reg));

      __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);

    if (create_memento) {
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
    if (count_constructions) {
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      __ Addu(t6, t4, Operand(at));
      if (FLAG_debug_code) {
        __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                  a0, Operand(t6));
      }
      __ InitializeFieldsWithFiller(t5, a0, t7);
      __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
      __ InitializeFieldsWithFiller(t5, t6, t7);
    } else if (create_memento) {
      __ Addu(a0, t4, Operand(at));
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
      __ InitializeFieldsWithFiller(t5, a0, t7);

      __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);

      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
      __ Addu(a0, t4, Operand(at));
      __ InitializeFieldsWithFiller(t5, a0, t7);
    __ Addu(a3, a3, Operand(t6));

    __ Branch(&allocated, eq, a3, Operand(zero_reg));
    __ Assert(greater_equal, kPropertyAllocationCountFailed,
              a3, Operand(zero_reg));
    __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);

    if (count_constructions) {
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
    } else if (FLAG_debug_code) {
      __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
      __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));

    __ Branch(&loop, less, a2, Operand(t6));
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(t4, t5);

  if (create_memento) {

  if (create_memento) {
    __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
  } else {
    __ CallRuntime(Runtime::kHiddenNewObject, 1);
  }
  Label count_incremented;
  if (create_memento) {
    __ jmp(&count_incremented);

  if (create_memento) {
    __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
    __ Branch(&count_incremented, eq, a2, Operand(t5));

  __ bind(&count_incremented);
  __ Addu(t0, a2, Operand(t0));

  __ Addu(a3, a3, Operand(-2));

  if (is_api_function) {
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    __ Call(code, RelocInfo::CODE_TARGET);

    ParameterCount actual(a0);

  if (!is_api_function && !count_constructions) {
    masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());

  Label use_receiver, exit;
  __ JumpIfSmi(v0, &use_receiver);

  __ GetObjectType(v0, a1, a3);

  __ bind(&use_receiver);

  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
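
// Shared body of the JS entry trampolines: clears cp, copies the arguments
// from the argv array onto the stack, and invokes the function either as a
// regular call or as a construct call depending on is_construct.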
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  __ mov(cp, zero_reg);

  __ Branch(&loop, ne, s0, Operand(t2));

  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);

  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  ParameterCount actual(a0);
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
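
// Code-age builtins. Young code is patched with a call into one of these
// stubs; they save the JS caller-saved registers, hand the calling code
// object to a C callback that rejuvenates it, and then resume execution in
// the rejuvenated code.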
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
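
// Like the generators above, but used for the execution-triggered aging
// path: the C callback marks the calling code object as executed instead of
// simply making it young again.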
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
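
// Deoptimizer support: NotifyStubFailure re-enters unoptimized code after a
// stub failure, and Generate_NotifyDeoptimizedHelper dispatches on the saved
// full-codegen state (no registers vs. TOS register) after
// Runtime::kHiddenNotifyDeoptimized returns.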
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);

  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,

  __ bind(&with_tos_register);

  __ bind(&unknown_state);
  __ stop("no cases left");
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);

  __ addu(v0, v0, a1);
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  __ CallRuntime(Runtime::kHiddenStackGuard, 0);

  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);
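
// Implements Function.prototype.call: normalizes the receiver, shifts the
// arguments down to drop it when needed, and dispatches to the function, a
// function proxy, or the CALL_NON_FUNCTION builtin via the arguments
// adaptor. t0 encodes the call kind (0: function, 1: proxy, 2: non-function).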
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  __ Branch(&done, ne, a0, Operand(zero_reg));
  __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
  __ Addu(a0, a0, Operand(1));

  Label slow, non_function;
  __ addu(at, sp, at);
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);

  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));
  { Label convert_to_object, use_global_receiver, patch_receiver;

    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    __ addu(a2, sp, at);
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));

    __ GetObjectType(a2, a3, a3);

    __ bind(&convert_to_object);

    __ addu(at, sp, at);
    __ li(t0, Operand(0, RelocInfo::NONE32));
    __ Branch(&patch_receiver);

    __ bind(&use_global_receiver);

    __ bind(&patch_receiver);
    __ addu(a3, sp, at);

    __ Branch(&shift_arguments);

    __ li(t0, Operand(1, RelocInfo::NONE32));

    __ bind(&non_function);
    __ li(t0, Operand(2, RelocInfo::NONE32));

    __ addu(a2, sp, at);
  __ bind(&shift_arguments);

  __ addu(a2, sp, at);

  __ Branch(&loop, ne, a2, Operand(sp));
  __ Subu(a0, a0, Operand(1));

  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
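
// Implements Function.prototype.apply: checks that the stack can hold the
// unpacked argument array, normalizes the receiver, then pushes each element
// (loaded via Runtime::kGetProperty) before invoking the function or, for
// proxies, the CALL_FUNCTION_PROXY builtin.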
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  __ subu(a2, sp, a2);
  __ Branch(&okay, gt, a2, Operand(t3));

  __ mov(a1, zero_reg);

  Label push_receiver;
  __ GetObjectType(a1, a2, a2);

  Label call_to_object, use_global_receiver;
  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

  __ JumpIfSmi(a0, &call_to_object);
  __ LoadRoot(a1, Heap::kNullValueRootIndex);
  __ Branch(&use_global_receiver, eq, a0, Operand(a1));
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  __ Branch(&use_global_receiver, eq, a0, Operand(a2));

  __ GetObjectType(a0, a1, a1);

  __ bind(&call_to_object);
  __ Branch(&push_receiver);

  __ bind(&use_global_receiver);

  __ bind(&push_receiver);

  __ CallRuntime(Runtime::kGetProperty, 2);

  __ Branch(&loop, ne, a0, Operand(a1));

  ParameterCount actual(a0);
  __ GetObjectType(a1, a2, a2);

  frame_scope.GenerateLeaveFrame();

  __ bind(&call_proxy);
  __ Addu(a0, a0, Operand(1));
  __ li(a2, Operand(0, RelocInfo::NONE32));
  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
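
// The arguments adaptor frame helpers push and pop the frame that bridges
// calls whose actual argument count does not match the callee's formal
// parameter count.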
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ MultiPop(fp.bit() | ra.bit());
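
// Adaptor trampoline proper: a0 holds the actual argument count and a2 the
// expected count. Arguments are copied (and padded with undefined when too
// few were passed) into a new adaptor frame, unless the callee uses
// kDontAdaptArgumentsSentinel, in which case its code is entered directly.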
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ Branch(&too_few, Uless, a0, Operand(a2));

  EnterArgumentsAdaptorFrame(masm);
  __ Addu(a0, fp, a0);
  __ subu(a2, a0, a2);

  EnterArgumentsAdaptorFrame(masm);
  __ Addu(a0, fp, a0);

  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
  __ Subu(a2, fp, Operand(t2));

  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  LeaveArgumentsAdaptorFrame(masm);

  __ bind(&dont_adapt_arguments);
#endif  // V8_TARGET_ARCH_MIPS