#if defined(V8_TARGET_ARCH_X64)

const int Deoptimizer::table_entry_size_ = 10;


int Deoptimizer::patch_size() {
  return Assembler::kCallInstructionLength;
}
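

// Deoptimization of a function works by patching its optimized code in
// place: every lazy bailout site is overwritten with a call into the
// deoptimizer, so the next time execution reaches the site it falls into
// the bailout machinery instead of continuing in the invalidated code.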
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  AssertNoAllocation no_allocation;

  if (!function->IsOptimized()) return;

  // The optimized code is going to be patched, so we cannot use it any more.
  // Play safe and reset the whole cache.
  function->shared()->ClearOptimizedCodeMap();

  // Get the optimized code.
  Code* code = function->code();

  // The relocation info is invalidated by the code patching below and is
  // not needed any more.
  code->InvalidateRelocation();
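
  // Patch every deoptimization point in the optimized code with a call to
  // the corresponding deoptimization entry. Entries whose pc is -1 have no
  // recorded code location and are skipped.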
  Address instruction_start = function->code()->instruction_start();
#ifdef DEBUG
  Address prev_call_address = NULL;
#endif
  DeoptimizationInputData* deopt_data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
    if (deopt_data->Pc(i)->value() == -1) continue;
    // There is room enough to write a long call instruction because we pad
    // LLazyBailout instructions with nops if necessary.
    Address call_address = instruction_start + deopt_data->Pc(i)->value();
    CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
    patcher.masm()->Call(GetDeoptimizationEntry(i, LAZY), RelocInfo::NONE);
    ASSERT(prev_call_address == NULL ||
           call_address >= prev_call_address + patch_size());
#ifdef DEBUG
    prev_call_address = call_address;
#endif
  }

  Isolate* isolate = code->GetIsolate();

  // Add the deoptimizing code to the list.
  DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
  DeoptimizerData* data = isolate->deoptimizer_data();
  node->set_next(data->deoptimizing_code_list_);
  data->deoptimizing_code_list_ = node;
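
  // We might be in the middle of incremental marking with compaction, so
  // tell the collector to treat the patched code object specially and
  // ignore any slots recorded on it.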
  isolate->heap()->mark_compact_collector()->InvalidateCode(code);

  // Set the code for the function to the non-optimized version.
  function->ReplaceCode(function->shared()->code());

  if (FLAG_trace_deopt) {
    PrintF("[forced deoptimization: ");
    function->PrintName();
    PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
  }
}
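

// Instruction bytes involved in patching the interrupt check at a loop's
// back edge: the check's conditional branch is either jns (0x79, count
// based interrupts) or jae (0x73, stack guard), followed by a near call
// (0xe8); 0x66 0x90 encodes the two-byte nop that replaces the branch.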
static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x1f;
static const byte kJaeInstruction = 0x73;
static const byte kJaeOffset = 0x07;
static const byte kCallInstruction = 0xe8;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
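

// Patch the stack check (or interrupt check) at the given pc so that the
// guarded call is always taken and targets the on-stack replacement code.
// Before patching the code reads
//     jns/jae ok
//     call <interrupt/stack guard>
// ok:
// and afterwards
//     nop (2 bytes)
//     call <on-stack replacement>
// ok: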
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                        Address pc_after,
                                        Code* check_code,
                                        Code* replacement_code) {
  Address call_target_address = pc_after - kIntSize;
  ASSERT_EQ(check_code->entry(),
            Assembler::target_address_at(call_target_address));
  if (FLAG_count_based_interrupts) {
    ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
  } else {
    ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
    ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
  }
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
  *(call_target_address - 3) = kNopByteOne;
  *(call_target_address - 2) = kNopByteTwo;
  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
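

// The inverse of PatchStackCheckCodeAt: restore the conditional branch in
// front of the call and point the call back at the original check code.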
void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
                                         Address pc_after,
                                         Code* check_code,
                                         Code* replacement_code) {
  Address call_target_address = pc_after - kIntSize;
  ASSERT(replacement_code->entry() ==
         Assembler::target_address_at(call_target_address));
  ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
  if (FLAG_count_based_interrupts) {
    *(call_target_address - 3) = kJnsInstruction;
    *(call_target_address - 2) = kJnsOffset;
  } else {
    *(call_target_address - 3) = kJaeInstruction;
    *(call_target_address - 2) = kJaeOffset;
  }
  Assembler::set_target_address_at(call_target_address,
                                   check_code->entry());

  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, check_code);
}
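

// Find the deoptimization bailout entry for the given ast id whose
// translation describes exactly one frame; this is the entry OSR uses.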
static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
  ByteArray* translations = data->TranslationByteArray();
  int length = data->DeoptCount();
  for (int i = 0; i < length; i++) {
    if (data->AstId(i) == ast_id) {
      TranslationIterator it(translations, data->TranslationIndex(i)->value());
      int value = it.Next();
      ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
      // Read the number of frames.
      value = it.Next();
      if (value == 1) return i;
    }
  }
  UNREACHABLE();
  return -1;
}
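

// OSR builds exactly one output frame: the unoptimized (input) frame is
// translated into the layout the optimized code expects, so execution can
// resume in optimized code at the recorded OSR pc. If any translation
// command fails, the input frame is kept and entry is aborted.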
void Deoptimizer::DoComputeOsrOutputFrame() {
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
      optimized_code_->deoptimization_data());
  unsigned ast_id = data->OsrAstId()->value();
  ASSERT(bailout_id_ == ast_id);

  int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
  ByteArray* translations = data->TranslationByteArray();

  TranslationIterator iterator(translations, translation_index);
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator.Next());
  ASSERT(Translation::BEGIN == opcode);
  USE(opcode);
  int count = iterator.Next();
  ASSERT(count == 1);
  USE(count);

  opcode = static_cast<Translation::Opcode>(iterator.Next());
  USE(opcode);
  ASSERT(Translation::JS_FRAME == opcode);
  unsigned node_id = iterator.Next();
  USE(node_id);
  ASSERT(node_id == ast_id);
  int closure_id = iterator.Next();
  USE(closure_id);
  ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
  unsigned height = iterator.Next();
  unsigned height_in_bytes = height * kPointerSize;
  USE(height_in_bytes);

  unsigned fixed_size = ComputeFixedSize(function_);
  unsigned input_frame_size = input_->GetFrameSize();
  ASSERT(fixed_size + height_in_bytes == input_frame_size);

  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
  unsigned outgoing_size = outgoing_height * kPointerSize;
  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
  ASSERT(outgoing_size == 0);  // OSR does not happen in the middle of a call.

  if (FLAG_trace_osr) {
    PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
           reinterpret_cast<intptr_t>(function_));
    function_->PrintName();
    PrintF(" => node=%u, frame=%d->%d]\n",
           ast_id, input_frame_size, output_frame_size);
  }

  // There's only one output frame in the OSR case.
  output_count_ = 1;
  output_ = new FrameDescription*[1];
  output_[0] = new(output_frame_size) FrameDescription(
      output_frame_size, function_);
  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);

  // Clear the incoming parameters in the optimized frame to avoid
  // confusing the garbage collector.
  unsigned output_offset = output_frame_size - kPointerSize;
  int parameter_count = function_->shared()->formal_parameter_count() + 1;
  for (int i = 0; i < parameter_count; ++i) {
    output_[0]->SetFrameSlot(output_offset, 0);
    output_offset -= kPointerSize;
  }

  // Translate the incoming parameters. This may overwrite some of the
  // incoming argument slots we've just cleared.
  int input_offset = input_frame_size - kPointerSize;
  bool ok = true;
  int limit = input_offset - (parameter_count * kPointerSize);
  while (ok && input_offset > limit) {
    ok = DoOsrTranslateCommand(&iterator, &input_offset);
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function.  Set them up explicitly.
  for (int i = StandardFrameConstants::kCallerPCOffset;
       ok && i >= StandardFrameConstants::kMarkerOffset;
       i -= kPointerSize) {
    intptr_t input_value = input_->GetFrameSlot(input_offset);
    if (FLAG_trace_osr) {
      const char* name = "UNKNOWN";
      switch (i) {
        case StandardFrameConstants::kCallerPCOffset:
          name = "caller's pc";
          break;
        case StandardFrameConstants::kCallerFPOffset:
          name = "fp";
          break;
        case StandardFrameConstants::kContextOffset:
          name = "context";
          break;
        case StandardFrameConstants::kMarkerOffset:
          name = "function";
          break;
      }
      PrintF("    [rsp + %d] <- 0x%08" V8PRIxPTR " ; [rsp + %d] "
             "(fixed part - %s)\n",
             output_offset, input_value, input_offset, name);
    }
    output_[0]->SetFrameSlot(output_offset, input_value);
    input_offset -= kPointerSize;
    output_offset -= kPointerSize;
  }

  // Translate the rest of the frame.
  while (ok && input_offset >= 0) {
    ok = DoOsrTranslateCommand(&iterator, &input_offset);
  }

  // If translation of any command failed, continue using the input frame.
  if (!ok) {
    delete output_[0];
    output_[0] = input_;
    output_[0]->SetPc(reinterpret_cast<intptr_t>(from_));
  } else {
    // Set up the frame pointer and the context pointer.
    output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code()));
    output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code()));

    unsigned pc_offset = data->OsrPcOffset()->value();
    intptr_t pc = reinterpret_cast<intptr_t>(
        optimized_code_->entry() + pc_offset);
    output_[0]->SetPc(pc);
  }
  Code* continuation =
      function_->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
  output_[0]->SetContinuation(
      reinterpret_cast<intptr_t>(continuation->entry()));

  if (FLAG_trace_osr) {
    PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
           ok ? "finished" : "aborted",
           reinterpret_cast<intptr_t>(function_));
    function_->PrintName();
    PrintF(" => pc=0x%0" V8PRIxPTR "]\n", output_[0]->GetPc());
  }
}
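

// An arguments adaptor frame sits between a caller and a callee whose
// formal parameter count differs from the actual argument count. The frame
// is filled top-down: translated arguments, caller's pc and fp, a context
// sentinel, the function, and the argument count, then the pc is pointed
// at the deopt site recorded for the ArgumentsAdaptorTrampoline builtin.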
void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                                 int frame_index) {
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF("  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // Arguments adaptor can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context (adaptor sentinel)\n",
           top_address + output_offset, output_offset, context);
  }

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  ASSERT(0 == output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
}
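

// A construct stub frame is rebuilt for a constructor call that was
// inlined and then deoptimized; its pc is pointed at the deopt site
// recorded for the generic construct stub.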
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF("  translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = 7 * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  ASSERT(0 == output_offset);

  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
}
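

// Accessor stub frames represent inlined property getter/setter calls.
// The output frame is an internal frame holding the accessor's code
// object; a setter frame additionally holds the implicit value argument
// as its single translated stack slot.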
void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                             int frame_index,
                                             bool is_setter_stub_frame) {
  JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
  // The receiver (and the implicit return value, if any) are expected in
  // registers by the LoadIC/StoreIC, so they don't belong to the output
  // stack frame. This means that we have to use a height of 0.
  unsigned height = 0;
  unsigned height_in_bytes = height * kPointerSize;
  const char* kind = is_setter_stub_frame ? "setter" : "getter";
  if (FLAG_trace_deopt) {
    PrintF("  translating %s stub => height=%u\n", kind, height_in_bytes);
  }

  // We need 1 stack entry for the return address + 4 stack entries from
  // StackFrame::INTERNAL (FP, context, frame type, code object, see
  // MacroAssembler::EnterFrame) + 1 stack entry from setter stub frames.
  unsigned fixed_frame_entries = 1 + 4 + (is_setter_stub_frame ? 1 : 0);
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, accessor);
  output_frame->SetFrameType(StackFrame::INTERNAL);

  // A frame for an accessor stub can not be the topmost or bottommost one.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  intptr_t top_address =
      output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  unsigned output_offset = output_frame_size;

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; function (%s sentinel)\n",
           top_address + output_offset, output_offset, value, kind);
  }

  // Get Code object from accessor stub.
  output_offset -= kPointerSize;
  Builtins::Name name = is_setter_stub_frame ?
      Builtins::kStoreIC_Setter_ForDeopt :
      Builtins::kLoadIC_Getter_ForDeopt;
  Code* accessor_stub = isolate_->builtins()->builtin(name);
  value = reinterpret_cast<intptr_t>(accessor_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Skip receiver.
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  iterator->Skip(Translation::NumberOfOperandsFor(opcode));

  if (is_setter_stub_frame) {
    // The implicit return value was part of the artificial setter stub
    // environment.
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  ASSERT(0 == output_offset);

  Smi* offset = is_setter_stub_frame ?
      isolate_->heap()->setter_stub_deopt_pc_offset() :
      isolate_->heap()->getter_stub_deopt_pc_offset();
  intptr_t pc = reinterpret_cast<intptr_t>(
      accessor_stub->instruction_start() + offset->value());
  output_frame->SetPc(pc);
}
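

// The general case: rebuild one unoptimized JavaScript frame from the
// translation. Parameters, the fixed part (caller's pc and fp, context,
// function) and the expression stack are filled in, and the frame's pc is
// looked up in the unoptimized code's deoptimization output data.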
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                   int frame_index) {
  BailoutId node_id = BailoutId(iterator->Next());
  JSFunction* function;
  if (frame_index != 0) {
    function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  } else {
    int closure_id = iterator->Next();
    USE(closure_id);
    ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
    function = function_;
  }
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF("  translating ");
    function->PrintName();
    PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeFixedSize(function);
  unsigned input_frame_size = input_->GetFrameSize();
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  ASSERT(frame_index >= 0 && frame_index < output_count_);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address for the bottommost output frame can be computed from
  // the input frame pointer and the output frame's height.  For all
  // subsequent output frames, it can be computed from the previous one's
  // top address and the current frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    // 2 = context and function in the frame.
    top_address =
        input_->GetRegister(rbp.code()) - (2 * kPointerSize) - height_in_bytes;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = function->shared()->formal_parameter_count() + 1;
  unsigned output_offset = output_frame_size;
  unsigned input_offset = input_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  input_offset -= (parameter_count * kPointerSize);

  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  intptr_t value;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's pc\n",
           top_address + output_offset, output_offset, value);
  }

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  ASSERT(!is_bottommost || input_->GetRegister(rbp.code()) == fp_value);
  output_frame->SetFp(fp_value);
  if (is_topmost) output_frame->SetRegister(rbp.code(), fp_value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame.  For all subsequent output frames it can be gotten from the
  // function so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = reinterpret_cast<intptr_t>(function->context());
  }
  output_frame->SetFrameSlot(output_offset, value);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(rsi.code(), value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  // The function for the bottommost output frame should also agree with
  // the input frame.
  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR
           " ; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Translate the rest of the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  ASSERT(0 == output_offset);

  // Compute this frame's PC.
  Code* non_optimized_code = function->shared()->code();
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
  Address start = non_optimized_code->instruction_start();
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  // Set the continuation for the topmost frame.
  if (is_topmost && bailout_type_ != DEBUGGER) {
    Code* continuation = (bailout_type_ == EAGER)
        ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
        : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}


void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
  // Set the register values. The values are not important as there are no
  // callee saved registers in JavaScript frames, so all registers are
  // spilled. Registers rbp and rsp are set to the correct values though.
  for (int i = 0; i < Register::kNumRegisters; i++) {
    input_->SetRegister(i, i * 4);
  }
  input_->SetRegister(rsp.code(), reinterpret_cast<intptr_t>(frame->sp()));
  input_->SetRegister(rbp.code(), reinterpret_cast<intptr_t>(frame->fp()));
  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    input_->SetDoubleRegister(i, 0.0);
  }

  // Fill the frame content from the actual data on the frame.
  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
    input_->SetFrameSlot(i, Memory::uint64_at(tos + i));
  }
}
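

// The common deoptimization entry code: save all general purpose and XMM
// registers, create a Deoptimizer object via a C call, copy the current
// frame into its input descriptor, let it compute the output frames,
// push those frames onto the stack, restore the registers, and return to
// the continuation.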
void Deoptimizer::EntryGenerator::Generate() {
  GeneratePrologue();

  // Save all general purpose registers before messing with them.
  const int kNumberOfRegisters = Register::kNumRegisters;

  const int kDoubleRegsSize = kDoubleSize *
      XMMRegister::kNumAllocatableRegisters;
  __ subq(rsp, Immediate(kDoubleRegsSize));

  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
    XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
    int offset = i * kDoubleSize;
    __ movsd(Operand(rsp, offset), xmm_reg);
  }

  // We push all registers onto the stack, even though we do not need
  // to restore all later.
  for (int i = 0; i < kNumberOfRegisters; i++) {
    Register r = Register::from_code(i);
    __ push(r);
  }

  const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
                                      kDoubleRegsSize;

  // C calling-convention argument registers differ between the Windows
  // and System V x64 ABIs.
#ifdef _WIN64
  Register arg4 = r9;
  Register arg3 = r8;
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  // Use r11 to hold the fifth argument temporarily; it is not an argument
  // passing register on either ABI.
  Register arg5 = r11;

  // Get the bailout id from the stack.
  __ movq(arg3, Operand(rsp, kSavedRegistersAreaSize));

  // Get the address of the location in the code object if possible
  // and compute the fp-to-sp delta in register arg5.
  if (type() == EAGER) {
    __ Set(arg4, 0);
    __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
  } else {
    __ movq(arg4, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
    __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 2 * kPointerSize));
  }
  __ subq(arg5, rbp);
  __ neg(arg5);

  // Allocate a new deoptimizer object.
  __ PrepareCallCFunction(6);
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(arg1, rax);
  __ Set(arg2, type());
  // Args 3 and 4 are already in the right registers.

  // On windows put the last two arguments on the stack (PrepareCallCFunction
  // has created space for this). On linux pass them in r8 and r9.
#ifdef _WIN64
  __ movq(Operand(rsp, 4 * kPointerSize), arg5);
  __ LoadAddress(arg5, ExternalReference::isolate_address());
  __ movq(Operand(rsp, 5 * kPointerSize), arg5);
#else
  __ movq(r8, arg5);
  __ LoadAddress(r9, ExternalReference::isolate_address());
#endif

  Isolate* isolate = masm()->isolate();

  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
  }
  // Preserve deoptimizer object in register rax and get the input
  // frame descriptor pointer.
  __ movq(rbx, Operand(rax, Deoptimizer::input_offset()));

  // Fill in the input registers.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ pop(Operand(rbx, offset));
  }

  // Fill in the double input registers.
  int double_regs_offset = FrameDescription::double_registers_offset();
  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
    int dst_offset = i * kDoubleSize + double_regs_offset;
    __ pop(Operand(rbx, dst_offset));
  }

  // Remove the bailout id from the stack.
  if (type() == EAGER) {
    __ addq(rsp, Immediate(kPointerSize));
  } else {
    __ addq(rsp, Immediate(2 * kPointerSize));
  }

  // Compute a pointer to the unwinding limit in register rcx; that is
  // the first stack slot not part of the input frame.
  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
  __ addq(rcx, rsp);

  // Unwind the stack down to - but not including - the unwinding limit
  // and copy the contents of the activation frame into the input frame
  // description.
  __ lea(rdx, Operand(rbx, FrameDescription::frame_content_offset()));
  Label pop_loop;
  __ bind(&pop_loop);
  __ pop(Operand(rdx, 0));
  __ addq(rdx, Immediate(sizeof(intptr_t)));
  __ cmpq(rcx, rsp);
  __ j(not_equal, &pop_loop);

  // Compute the output frames in the deoptimizer.
  __ push(rax);
  __ PrepareCallCFunction(2);
  __ movq(arg1, rax);
  __ LoadAddress(arg2, ExternalReference::isolate_address());
  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(
        ExternalReference::compute_output_frames_function(isolate), 2);
  }
  __ pop(rax);

  // Replace the current frame with the output frames.
  Label outer_push_loop, inner_push_loop;
  // Outer loop state: rax = current FrameDescription**, rdx = one past the
  // last FrameDescription**.
  __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset()));
  __ movq(rax, Operand(rax, Deoptimizer::output_offset()));
  __ lea(rdx, Operand(rax, rdx, times_8, 0));
  __ bind(&outer_push_loop);
  // Inner loop state: rbx = current FrameDescription*, rcx = loop index.
  __ movq(rbx, Operand(rax, 0));
  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
  __ bind(&inner_push_loop);
  __ subq(rcx, Immediate(sizeof(intptr_t)));
  __ push(Operand(rbx, rcx, times_1, FrameDescription::frame_content_offset()));
  __ cmpq(rcx, Immediate(0));
  __ j(not_equal, &inner_push_loop);
  __ addq(rax, Immediate(kPointerSize));
  __ cmpq(rax, rdx);
  __ j(below, &outer_push_loop);

  // In case of OSR, restore the XMM registers.
  if (type() == OSR) {
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
      XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
      int src_offset = i * kDoubleSize + double_regs_offset;
      __ movsd(xmm_reg, Operand(rbx, src_offset));
    }
  }

  // Push state, pc, and continuation from the last output frame.
  if (type() != OSR) {
    __ push(Operand(rbx, FrameDescription::state_offset()));
  }
  __ push(Operand(rbx, FrameDescription::pc_offset()));
  __ push(Operand(rbx, FrameDescription::continuation_offset()));

  // Push the registers from the last output frame.
  for (int i = 0; i < kNumberOfRegisters; i++) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ push(Operand(rbx, offset));
  }

  // Restore the registers from the stack; do not restore rsp, simply pop
  // that value into the next register and overwrite it afterwards.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    Register r = Register::from_code(i);
    if (r.is(rsp)) {
      ASSERT(i > 0);
      r = Register::from_code(i - 1);
    }
    __ pop(r);
  }

  // Set up the roots register.
  __ InitializeRootRegister();
  __ InitializeSmiConstantRegister();

  // Return to the continuation point.
  __ ret(0);
}


void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
  // Create a sequence of deoptimization entries of fixed size, each
  // pushing its id and jumping to the common deoptimization code.
  Label done;
  for (int i = 0; i < count(); i++) {
    int start = masm()->pc_offset();
    USE(start);
    __ push_imm32(i);
    __ jmp(&done);
    ASSERT(masm()->pc_offset() - start == table_entry_size_);
  }
  __ bind(&done);
}

#endif  // V8_TARGET_ARCH_X64