// NOTE(review): extraction is garbled — the leading "40" is a fused original
// source line number, not code. Size, in instruction words, of the patched
// call sequence used below when deoptimizing call sites.
40 const int kCallInstructionSizeInWords = 4;
// NOTE(review): fragment of Deoptimizer::DeoptimizeFunction — the signature
// and many interior lines are missing from this extraction, and the leading
// integers on each line are fused original source line numbers, not code.
// Do not edit logic from this view; restore from upstream v8 sources.
// GC must not move objects while code is being patched.
47 AssertNoAllocation no_allocation;
// Nothing to do for a function that was never optimized.
49 if (!function->IsOptimized())
return;
// Drop the cached optimized code so it cannot be re-entered.
53 function->shared()->ClearOptimizedCodeMap();
56 Code*
code =
function->code();
57 Address code_start_address = code->instruction_start();
// The optimized code is now invalid; relocation info is no longer needed.
61 code->InvalidateRelocation();
65 DeoptimizationInputData* deopt_data =
// Patch every recorded deopt point in the code (a Pc value of -1 marks an
// entry with no patchable call site).
70 for (
int i = 0; i < deopt_data->DeoptCount(); i++) {
71 if (deopt_data->Pc(i)->value() == -1)
continue;
72 Address call_address = code_start_address + deopt_data->Pc(i)->value();
79 CodePatcher patcher(call_address, call_size_in_words);
// Patched call sites must not overlap each other.
82 call_address >= prev_call_address +
patch_size());
86 prev_call_address = call_address;
90 Isolate* isolate = code->GetIsolate();
// Prepend this code object to the isolate's list of deoptimizing code.
94 DeoptimizerData* data = isolate->deoptimizer_data();
95 node->set_next(data->deoptimizing_code_list_);
96 data->deoptimizing_code_list_ = node;
// Tell the GC that this code object is invalidated.
101 isolate->heap()->mark_compact_collector()->InvalidateCode(code);
// Optional tracing of the forced deoptimization.
105 if (FLAG_trace_deopt) {
106 PrintF(
"[forced deoptimization: ");
107 function->PrintName();
108 PrintF(
" / %x]\n", reinterpret_cast<uint32_t>(
function));
110 if (FLAG_print_code) {
// NOTE(review): fragment of Deoptimizer::PatchStackCheckCodeAt — most of the
// signature and several interior lines are missing; leading integers are
// fused original source line numbers. Patches the stack-check sequence in
// unoptimized code so it calls replacement_code instead of check_code.
121 Code* replacement_code) {
// Rewrite the instruction 6 instructions before pc_after; addiu at,zero,1
// presumably neuters the conditional branch guarding the stack check —
// TODO confirm against the matching Revert function below.
136 CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
137 patcher.masm()->addiu(at, zero_reg, 1);
// Sanity check: the current call target must still be check_code ...
141 ASSERT(reinterpret_cast<uint32_t>(
143 reinterpret_cast<uint32_t>(check_code->entry()));
// ... then redirect it to replacement_code.
145 replacement_code->entry());
// Keep incremental marking informed of the patched code target.
156 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
157 unoptimized_code, pc_after - 4 * kInstrSize, replacement_code);
// NOTE(review): fragment of Deoptimizer::RevertStackCheckCodeAt — inverse of
// PatchStackCheckCodeAt above. Signature head and interior lines are missing;
// leading integers are fused original source line numbers.
164 Code* replacement_code) {
// Restore the original comparison 6 instructions before pc_after: slt when
// interrupts are count-based, sltu (stack-limit compare) otherwise.
172 CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
173 if (FLAG_count_based_interrupts) {
174 patcher.masm()->slt(at, a3, zero_reg);
176 patcher.masm()->sltu(at,
sp, t0);
// Sanity check: the call currently targets replacement_code ...
181 ASSERT(reinterpret_cast<uint32_t>(
183 reinterpret_cast<uint32_t>(replacement_code->entry()));
// ... then point it back at check_code.
185 check_code->entry());
187 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
188 unoptimized_code, pc_after - 4 * kInstrSize, check_code);
// NOTE(review): fragment — the tail of this function (and some interior
// lines) is missing from the extraction; leading integers are fused original
// source line numbers. Finds the deopt-table index whose AST id matches
// ast_id; the visible code additionally decodes the translation to check the
// frame count.
192 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
193 ByteArray* translations = data->TranslationByteArray();
194 int length = data->DeoptCount();
195 for (
int i = 0; i < length; i++) {
196 if (data->AstId(i) == ast_id) {
197 TranslationIterator it(translations, data->TranslationIndex(i)->value());
198 int value = it.Next();
// Every translation starts with a BEGIN opcode.
199 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
// Only accept a match with a single frame (value == 1 here is presumably
// the frame count read from the translation — TODO confirm upstream).
202 if (value == 1)
return i;
// NOTE(review): garbled fragment of Deoptimizer::DoComputeOsrOutputFrame —
// many interior lines are missing (original numbering jumps) and the leading
// integers are fused source line numbers, not code. Builds the single output
// frame used when on-stack-replacing an unoptimized frame with optimized
// code. Do not edit logic from this view.
210 void Deoptimizer::DoComputeOsrOutputFrame() {
212 optimized_code_->deoptimization_data());
213 unsigned ast_id = data->OsrAstId()->value();
// Locate the translation for the OSR entry point.
215 int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
216 unsigned translation_index = data->TranslationIndex(bailout_id)->value();
217 ByteArray* translations = data->TranslationByteArray();
219 TranslationIterator iterator(translations, translation_index);
222 ASSERT(Translation::BEGIN == opcode);
224 int count = iterator.Next();
// OSR translations describe exactly one JS frame for the function itself.
231 ASSERT(Translation::JS_FRAME == opcode);
232 unsigned node_id = iterator.Next();
234 ASSERT(node_id == ast_id);
235 int closure_id = iterator.Next();
237 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
238 unsigned height = iterator.Next();
240 USE(height_in_bytes);
242 unsigned fixed_size = ComputeFixedSize(function_);
244 ASSERT(fixed_size + height_in_bytes == input_frame_size);
// OSR frames must not carry outgoing arguments.
247 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
248 unsigned outgoing_size = outgoing_height *
kPointerSize;
249 unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
250 ASSERT(outgoing_size == 0);
252 if (FLAG_trace_osr) {
254 reinterpret_cast<intptr_t>(function_));
256 PrintF(
" => node=%u, frame=%d->%d]\n",
266 output_frame_size, function_);
// Translate the incoming parameters (receiver included, hence +1).
272 int parameter_count = function_->shared()->formal_parameter_count() + 1;
273 for (
int i = 0; i < parameter_count; ++i) {
// Translate the fixed part of the frame, tracing each slot if requested.
282 int limit = input_offset - (parameter_count *
kPointerSize);
283 while (ok && input_offset > limit) {
284 ok = DoOsrTranslateCommand(&iterator, &input_offset);
292 uint32_t input_value = input_->
GetFrameSlot(input_offset);
293 if (FLAG_trace_osr) {
294 const char* name =
"UNKNOWN";
297 name =
"caller's pc";
309 PrintF(
" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
// Translate the rest of the frame (expression stack etc.).
322 while (ok && input_offset >= 0) {
323 ok = DoOsrTranslateCommand(&iterator, &input_offset);
// On failure, resume where we came from; on success, jump into the
// optimized code at the recorded OSR pc offset.
330 output_[0]->
SetPc(reinterpret_cast<uint32_t>(from_));
336 unsigned pc_offset = data->OsrPcOffset()->value();
337 uint32_t
pc =
reinterpret_cast<uint32_t
>(
338 optimized_code_->
entry() + pc_offset);
339 output_[0]->
SetPc(pc);
341 Code* continuation = isolate_->
builtins()->
builtin(Builtins::kNotifyOSR);
343 reinterpret_cast<uint32_t>(continuation->entry()));
345 if (FLAG_trace_osr) {
347 ok ?
"finished" :
"aborted",
348 reinterpret_cast<intptr_t>(function_));
350 PrintF(
" => pc=0x%0x]\n", output_[0]->GetPc());
// NOTE(review): garbled fragment of DoComputeArgumentsAdaptorFrame — interior
// lines are missing and leading integers are fused original source line
// numbers. Materializes an arguments-adaptor frame in output_[frame_index],
// filling slots from high to low addresses. Do not edit logic from this view.
355 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
358 unsigned height = iterator->Next();
360 if (FLAG_trace_deopt) {
361 PrintF(
" translating arguments adaptor => height=%d\n", height_in_bytes);
365 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Adaptor frames are never the outermost or innermost frame.
373 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
375 output_[frame_index] = output_frame;
// The frame's top is computed from the previous (caller) frame's top.
379 uint32_t top_address;
380 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
381 output_frame->SetTop(top_address);
// Translated parameters fill the top of the frame.
384 int parameter_count = height;
385 unsigned output_offset = output_frame_size;
386 for (
int i = 0; i < parameter_count; ++i) {
388 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc slot.
393 intptr_t callers_pc = output_[frame_index - 1]->
GetPc();
394 output_frame->SetFrameSlot(output_offset, callers_pc);
395 if (FLAG_trace_deopt) {
396 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
397 top_address + output_offset, output_offset, callers_pc);
// Caller's fp slot; also becomes this frame's fp.
402 intptr_t value = output_[frame_index - 1]->
GetFp();
403 output_frame->SetFrameSlot(output_offset, value);
405 output_frame->SetFp(fp_value);
406 if (FLAG_trace_deopt) {
407 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
408 fp_value, output_offset, value);
// Context slot holds the adaptor sentinel, not a real context.
413 intptr_t context =
reinterpret_cast<intptr_t
>(
415 output_frame->SetFrameSlot(output_offset, context);
416 if (FLAG_trace_deopt) {
417 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
418 top_address + output_offset, output_offset, context);
// Function slot.
423 value =
reinterpret_cast<intptr_t
>(
function);
424 output_frame->SetFrameSlot(output_offset, value);
425 if (FLAG_trace_deopt) {
426 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function\n",
427 top_address + output_offset, output_offset, value);
// Argument count as a Smi (height - 1 excludes the receiver).
432 value =
reinterpret_cast<uint32_t
>(
Smi::FromInt(height - 1));
433 output_frame->SetFrameSlot(output_offset, value);
434 if (FLAG_trace_deopt) {
435 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
436 top_address + output_offset, output_offset, value, height - 1);
// All slots must have been written.
439 ASSERT(0 == output_offset);
// Resume inside the adaptor trampoline at the recorded deopt pc offset.
441 Builtins* builtins = isolate_->
builtins();
442 Code* adaptor_trampoline =
443 builtins->
builtin(Builtins::kArgumentsAdaptorTrampoline);
444 uint32_t pc =
reinterpret_cast<uint32_t
>(
445 adaptor_trampoline->instruction_start() +
446 isolate_->
heap()->arguments_adaptor_deopt_pc_offset()->value());
447 output_frame->SetPc(pc);
// NOTE(review): garbled fragment of DoComputeConstructStubFrame — interior
// lines are missing and leading integers are fused original source line
// numbers. Materializes a construct-stub frame, slot by slot from high to low
// addresses, mirroring the adaptor-frame code above. Do not edit logic here.
451 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
453 Builtins* builtins = isolate_->
builtins();
454 Code* construct_stub = builtins->
builtin(Builtins::kJSConstructStubGeneric);
456 unsigned height = iterator->Next();
458 if (FLAG_trace_deopt) {
459 PrintF(
" translating construct stub => height=%d\n", height_in_bytes);
463 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
468 output_frame->SetFrameType(StackFrame::CONSTRUCT);
// Construct-stub frames are never the outermost or innermost frame.
471 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
473 output_[frame_index] = output_frame;
// The frame's top is computed from the previous (caller) frame's top.
477 uint32_t top_address;
478 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
479 output_frame->SetTop(top_address);
// Translated constructor arguments fill the top of the frame.
482 int parameter_count = height;
483 unsigned output_offset = output_frame_size;
484 for (
int i = 0; i < parameter_count; ++i) {
486 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc slot.
491 intptr_t callers_pc = output_[frame_index - 1]->
GetPc();
492 output_frame->SetFrameSlot(output_offset, callers_pc);
493 if (FLAG_trace_deopt) {
494 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
495 top_address + output_offset, output_offset, callers_pc);
// Caller's fp slot; also becomes this frame's fp.
500 intptr_t value = output_[frame_index - 1]->
GetFp();
501 output_frame->SetFrameSlot(output_offset, value);
503 output_frame->SetFp(fp_value);
504 if (FLAG_trace_deopt) {
505 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
506 fp_value, output_offset, value);
// Context is taken from the caller frame.
511 value = output_[frame_index - 1]->
GetContext();
512 output_frame->SetFrameSlot(output_offset, value);
513 if (FLAG_trace_deopt) {
514 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context\n",
515 top_address + output_offset, output_offset, value);
// Frame-type marker: CONSTRUCT sentinel in the function slot position.
520 value =
reinterpret_cast<intptr_t
>(
Smi::FromInt(StackFrame::CONSTRUCT));
521 output_frame->SetFrameSlot(output_offset, value);
522 if (FLAG_trace_deopt) {
523 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
524 top_address + output_offset, output_offset, value);
// The construct stub code object itself.
529 value =
reinterpret_cast<intptr_t
>(construct_stub);
530 output_frame->SetFrameSlot(output_offset, value);
531 if (FLAG_trace_deopt) {
532 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; code object\n",
533 top_address + output_offset, output_offset, value);
// Argument count as a Smi (height - 1 excludes the receiver).
538 value =
reinterpret_cast<uint32_t
>(
Smi::FromInt(height - 1));
539 output_frame->SetFrameSlot(output_offset, value);
540 if (FLAG_trace_deopt) {
541 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
542 top_address + output_offset, output_offset, value, height - 1);
// The constructor function being invoked with 'new'.
547 value =
reinterpret_cast<intptr_t
>(
function);
548 output_frame->SetFrameSlot(output_offset, value);
549 if (FLAG_trace_deopt) {
550 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
551 top_address + output_offset, output_offset, value);
// The newly allocated receiver is copied from the top frame slot.
557 value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
558 output_frame->SetFrameSlot(output_offset, value);
559 if (FLAG_trace_deopt) {
560 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
561 top_address + output_offset, output_offset, value);
// All slots must have been written.
564 ASSERT(0 == output_offset);
// Resume inside the construct stub at the recorded deopt pc offset.
566 uint32_t pc =
reinterpret_cast<uint32_t
>(
567 construct_stub->instruction_start() +
568 isolate_->
heap()->construct_stub_deopt_pc_offset()->value());
569 output_frame->SetPc(pc);
// NOTE(review): garbled fragment of DoComputeAccessorStubFrame — interior
// lines are missing and leading integers are fused original source line
// numbers. Materializes a getter/setter stub frame; is_setter_stub_frame
// selects between the two layouts (setters carry one extra slot for the
// value being stored). Do not edit logic from this view.
573 void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
575 bool is_setter_stub_frame) {
582 const char* kind = is_setter_stub_frame ?
"setter" :
"getter";
583 if (FLAG_trace_deopt) {
584 PrintF(
" translating %s stub => height=%u\n", kind, height_in_bytes);
// Fixed part: 5 entries, plus 1 for the stored value in the setter case.
591 unsigned fixed_frame_entries = 5 + (is_setter_stub_frame ? 1 : 0);
592 unsigned fixed_frame_size = fixed_frame_entries *
kPointerSize;
593 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Accessor frames are never the outermost or innermost frame.
601 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
603 output_[frame_index] = output_frame;
// The frame's top is computed from the previous (caller) frame's top.
607 uint32_t top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
608 output_frame->SetTop(top_address);
610 unsigned output_offset = output_frame_size;
// Caller's pc slot.
614 intptr_t value = output_[frame_index - 1]->
GetPc();
615 output_frame->SetFrameSlot(output_offset, value);
616 if (FLAG_trace_deopt) {
619 top_address + output_offset, output_offset, value);
// Caller's fp slot; also becomes this frame's fp.
624 value = output_[frame_index - 1]->
GetFp();
625 output_frame->SetFrameSlot(output_offset, value);
627 output_frame->SetFp(fp_value);
628 if (FLAG_trace_deopt) {
631 fp_value, output_offset, value);
// Context is taken from the caller frame.
636 value = output_[frame_index - 1]->
GetContext();
637 output_frame->SetFrameSlot(output_offset, value);
638 if (FLAG_trace_deopt) {
641 top_address + output_offset, output_offset, value);
// Frame-type sentinel in the function slot position.
647 output_frame->SetFrameSlot(output_offset, value);
648 if (FLAG_trace_deopt) {
650 " ; function (%s sentinel)\n",
651 top_address + output_offset, output_offset, value, kind);
// Select the deopt-aware accessor builtin for this frame kind.
657 Builtins::kStoreIC_Setter_ForDeopt :
658 Builtins::kLoadIC_Getter_ForDeopt;
660 value =
reinterpret_cast<intptr_t
>(accessor_stub);
661 output_frame->SetFrameSlot(output_offset, value);
662 if (FLAG_trace_deopt) {
665 top_address + output_offset, output_offset, value);
// Skip the translation command(s) we do not materialize here.
671 iterator->Skip(Translation::NumberOfOperandsFor(opcode));
// For setters, translate the value being stored into its slot.
673 if (is_setter_stub_frame) {
677 DoTranslateCommand(iterator, frame_index, output_offset);
// All slots must have been written.
680 ASSERT(0 == output_offset);
// Resume inside the accessor stub at the recorded deopt pc offset.
682 Smi* offset = is_setter_stub_frame ?
683 isolate_->
heap()->setter_stub_deopt_pc_offset() :
684 isolate_->
heap()->getter_stub_deopt_pc_offset();
685 intptr_t pc =
reinterpret_cast<intptr_t
>(
686 accessor_stub->instruction_start() + offset->value());
687 output_frame->SetPc(pc);
// NOTE(review): garbled fragment of DoComputeJSFrame — many interior lines
// are missing (original numbering jumps) and leading integers are fused
// source line numbers. Materializes a full JavaScript frame; the bottommost
// and topmost frames get extra register state. Do not edit logic from here.
693 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
696 BailoutId node_id = BailoutId(iterator->Next());
697 JSFunction*
function;
// Non-bottommost frames read their closure from the translation; the
// self-literal id means "use the function being deoptimized".
698 if (frame_index != 0) {
701 int closure_id = iterator->Next();
703 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
704 function = function_;
706 unsigned height = iterator->Next();
708 if (FLAG_trace_deopt) {
710 function->PrintName();
711 PrintF(
" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
716 unsigned fixed_frame_size = ComputeFixedSize(
function);
718 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
723 output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
725 bool is_bottommost = (0 == frame_index);
726 bool is_topmost = (output_count_ - 1 == frame_index);
727 ASSERT(frame_index >= 0 && frame_index < output_count_);
729 output_[frame_index] = output_frame;
// Non-bottommost frames compute top from the previous frame's top.
735 uint32_t top_address;
741 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
743 output_frame->SetTop(top_address);
// Translate the incoming parameters (receiver included, hence +1).
746 int parameter_count =
function->shared()->formal_parameter_count() + 1;
747 unsigned output_offset = output_frame_size;
748 unsigned input_offset = input_frame_size;
749 for (
int i = 0; i < parameter_count; ++i) {
751 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc slot (from the previous output frame here; the bottommost
// case is in lines missing from this extraction).
769 value = output_[frame_index - 1]->
GetPc();
771 output_frame->SetFrameSlot(output_offset, value);
772 if (FLAG_trace_deopt) {
773 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
774 top_address + output_offset, output_offset, value);
// Caller's fp slot; also becomes this frame's fp, and the fp register
// for the topmost/bottommost frame.
786 value = output_[frame_index - 1]->
GetFp();
788 output_frame->SetFrameSlot(output_offset, value);
791 output_frame->SetFp(fp_value);
793 output_frame->SetRegister(
fp.
code(), fp_value);
795 if (FLAG_trace_deopt) {
796 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
797 fp_value, output_offset, value);
// Context slot, taken from the function's context; the topmost frame
// also gets it in the cp register.
808 value =
reinterpret_cast<intptr_t
>(
function->context());
810 output_frame->SetFrameSlot(output_offset, value);
811 output_frame->SetContext(value);
812 if (is_topmost) output_frame->SetRegister(
cp.
code(), value);
813 if (FLAG_trace_deopt) {
814 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context\n",
815 top_address + output_offset, output_offset, value);
// Function slot.
821 value =
reinterpret_cast<uint32_t
>(
function);
825 output_frame->SetFrameSlot(output_offset, value);
826 if (FLAG_trace_deopt) {
827 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function\n",
828 top_address + output_offset, output_offset, value);
// Translate the expression stack / locals.
832 for (
unsigned i = 0; i < height; ++i) {
834 DoTranslateCommand(iterator, frame_index, output_offset);
836 ASSERT(0 == output_offset);
// Compute the resume pc inside the unoptimized code from the deopt
// output table of the shared function info's code.
839 Code* non_optimized_code =
function->shared()->code();
840 FixedArray* raw_data = non_optimized_code->deoptimization_data();
842 Address start = non_optimized_code->instruction_start();
843 unsigned pc_and_state =
GetOutputInfo(data, node_id, function->shared());
845 uint32_t pc_value =
reinterpret_cast<uint32_t
>(start + pc_offset);
846 output_frame->SetPc(pc_value);
// The topmost frame (except under the debugger) continues in the
// eager/lazy deopt notification builtin.
854 if (is_topmost && bailout_type_ !=
DEBUGGER) {
855 Builtins* builtins = isolate_->
builtins();
856 Code* continuation = (bailout_type_ ==
EAGER)
857 ? builtins->
builtin(Builtins::kNotifyDeoptimized)
858 : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
859 output_frame->SetContinuation(
860 reinterpret_cast<uint32_t>(continuation->entry()));
// NOTE(review): only the signature of Deoptimizer::FillInputFrame survives in
// this extraction — the entire body is missing. Restore from upstream before
// making any change here.
864 void Deoptimizer::FillInputFrame(
Address tos, JavaScriptFrame* frame) {
// NOTE(review): garbled fragment of EntryGenerator::Generate — many interior
// lines are missing and leading integers are fused source line numbers.
// Emits the MIPS assembly stub that saves register state, calls the C++
// deoptimizer, copies the computed output frames onto the stack, restores
// registers, and jumps to the continuation. Do not edit logic from here.
890 void Deoptimizer::EntryGenerator::Generate() {
893 Isolate* isolate = masm()->isolate();
895 CpuFeatures::Scope scope(
FPU);
// Save all general registers (plus sp/ra) and the double registers on
// the stack so the C++ deoptimizer can read the full machine state.
901 RegList saved_regs = restored_regs |
sp.
bit() | ra.bit();
903 const int kDoubleRegsSize =
907 __ Subu(
sp,
sp, Operand(kDoubleRegsSize));
916 __ Subu(
sp,
sp, kNumberOfRegisters * kPointerSize);
917 for (
int16_t i = kNumberOfRegisters - 1; i >= 0; i--) {
918 if ((saved_regs & (1 << i)) != 0) {
923 const int kSavedRegistersAreaSize =
// Compute the bailout id / fp-to-sp delta arguments depending on the
// deopt type (EAGER has no return address slot; LAZY has one extra).
932 if (type() == EAGER) {
933 __ mov(a3, zero_reg);
935 __ Addu(t0,
sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
936 }
else if (type() == OSR) {
939 __ Addu(t0,
sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
943 __ Addu(t0,
sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
// Call Deoptimizer::New(function, type, bailout_id, from, fp_to_sp, isolate).
950 __ PrepareCallCFunction(6, t1);
952 __ li(a1, Operand(type()));
956 __ li(t1, Operand(ExternalReference::isolate_address()));
960 AllowExternalCallThatCantCauseGC scope(masm());
961 __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
// Copy the saved register values into the input FrameDescription.
971 ASSERT(Register::kNumRegisters == kNumberOfRegisters);
972 for (
int i = 0; i < kNumberOfRegisters; i++) {
974 if ((saved_regs & (1 << i)) != 0) {
977 }
else if (FLAG_debug_code) {
987 int dst_offset = i *
kDoubleSize + double_regs_offset;
// Pop the saved-register area (plus the extra slot(s) for non-EAGER/OSR).
995 if (type() == EAGER || type() == OSR) {
996 __ Addu(
sp,
sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
998 __ Addu(
sp,
sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
1004 __ Addu(a2, a2,
sp);
1015 __ addiu(a3, a3,
sizeof(uint32_t));
// Call Deoptimizer::ComputeOutputFrames(deoptimizer).
1020 __ PrepareCallCFunction(1, a1);
1023 AllowExternalCallThatCantCauseGC scope(masm());
1025 ExternalReference::compute_output_frames_function(isolate), 1);
// Push the output frames onto the stack: outer loop over frames, inner
// loop over each frame's slots (copied top-down).
1030 Label outer_push_loop, inner_push_loop;
1036 __ addu(a1, a0, a1);
1037 __ bind(&outer_push_loop);
1041 __ bind(&inner_push_loop);
1042 __ Subu(a3, a3, Operand(
sizeof(uint32_t)));
1043 __ Addu(t2, a2, Operand(a3));
1046 __ Branch(&inner_push_loop,
ne, a3, Operand(zero_reg));
1048 __ Addu(a0, a0, Operand(kPointerSize));
1049 __ Branch(&outer_push_loop,
lt, a0, Operand(a1));
1053 if (type() != OSR) {
// 'at' is used as scratch below, so it must not be in the restore set.
1066 ASSERT(!(at.bit() & restored_regs));
// Restore the general registers from the topmost output frame.
1069 for (
int i = kNumberOfRegisters - 1; i >= 0; i--) {
1071 if ((restored_regs & (1 << i)) != 0) {
1076 __ InitializeRootRegister();
// Fall-through should be unreachable.
1081 __ stop(
"Unreachable.");
// NOTE(review): fragment without a visible signature — from the original
// line numbers this appears to be the deopt entry table generator
// (TableEntryGenerator::GeneratePrologue); confirm against upstream. Emits
// count() fixed-size table entries; each pushes its id and jumps to the
// common body. Leading integers are fused source line numbers.
1094 __ bind(&table_start);
1095 for (
int i = 0; i < count(); i++) {
// Non-EAGER entries reserve two stack slots, EAGER entries one.
1098 if (type() !=
EAGER) {
1100 __ addiu(
sp,
sp, -2 * kPointerSize);
1103 __ addiu(
sp,
sp, -1 * kPointerSize);
// Adjust t9 past the remaining entries to reach the common code.
1108 const int remaining_entries = (count() - i) * table_entry_size_;
1109 __ Addu(t9, t9, remaining_entries);
// Pad each entry to exactly table_entry_size_ bytes.
1116 while (table_entry_size_ > (masm()->SizeOfCodeGeneratedSince(&start))) {
1120 ASSERT_EQ(table_entry_size_, masm()->SizeOfCodeGeneratedSince(&start));
1123 ASSERT_EQ(masm()->SizeOfCodeGeneratedSince(&table_start),
1124 count() * table_entry_size_);
static int registers_offset()
static const int kCallerFPOffset
Code * builtin(Name name)
static DeoptimizationOutputData * cast(Object *obj)
static bool IsAddImmediate(Instr instr)
void PrintF(const char *format,...)
static Smi * FromInt(int value)
void SetFrameSlot(unsigned offset, intptr_t value)
static const int kNumAllocatableRegisters
#define ASSERT(condition)
const RegList kJSCallerSaved
static void DeoptimizeFunction(JSFunction *function)
const int kPointerSizeLog2
intptr_t GetContext() const
void SetFrameType(StackFrame::Type type)
static const int kNumRegisters
static int double_registers_offset()
static int frame_content_offset()
static int output_offset()
static void set_target_address_at(Address pc, Address target)
static int state_offset()
const RegList kCalleeSaved
void SetRegister(unsigned n, intptr_t value)
static unsigned decode(uint32_t value)
friend class DeoptimizingCodeListNode
static int GetOutputInfo(DeoptimizationOutputData *data, BailoutId node_id, SharedFunctionInfo *shared)
static void ReplaceCodeForRelatedFunctions(JSFunction *function, Code *code)
static int CallSize(Register target, Condition cond=al)
friend class BlockTrampolinePoolScope
uint32_t GetFrameSize() const
void SetContinuation(intptr_t pc)
static int frame_size_offset()
static int output_count_offset()
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static Address target_address_at(Address pc)
intptr_t GetFrameSlot(unsigned offset)
MemOperand CFunctionArgumentOperand(int index)
static Address GetDeoptimizationEntry(int id, BailoutType type)
static const int kContextOffset
static const int kFunctionOffset
static const int kCallerPCOffset
#define ASSERT_EQ(v1, v2)
friend class FrameDescription
virtual void GeneratePrologue()
const uint32_t kDebugZapValue
static bool IsBeq(Instr instr)
static const int kInstrSize
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static uint32_t & uint32_at(Address addr)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static const int kNumAllocatableRegisters
static const int kFrameSize
intptr_t GetRegister(unsigned n) const
static const int kMarkerOffset
static FPURegister FromAllocationIndex(int index)
void SetDoubleRegister(unsigned n, double value)
Register ToRegister(int num)
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static int continuation_offset()
static int input_offset()
static JSFunction * cast(Object *obj)