// NOTE(review): this chunk is a garbled extraction of V8's ia32 deoptimizer;
// original file line numbers are fused into the text and many lines are
// missing. Code lines below are preserved byte-for-byte; only comments added.
30 #if defined(V8_TARGET_ARCH_IA32)
// Size in bytes of one entry in the generated deoptimization jump table.
40 const int Deoptimizer::table_entry_size_ = 10;
// ---- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization (body fragment;
// signature and several interior lines are missing from this extraction).
// Purpose (from visible code): make sure the code object's relocation info
// is large enough that lazy deoptimization can later rewrite it in place,
// padding with filler COMMENT reloc entries if it is too small.
49 Isolate* isolate = code->GetIsolate();
50 HandleScope scope(isolate);
// Accumulate the minimum reloc size implied by the deopt table entries.
54 int min_reloc_size = 0;
55 int prev_pc_offset = 0;
56 DeoptimizationInputData* deopt_data =
58 for (
int i = 0; i < deopt_data->DeoptCount(); i++) {
59 int pc_offset = deopt_data->Pc(i)->value();
// A pc of -1 marks a deopt entry with no patchable call site; skip it.
60 if (pc_offset == -1)
continue;
62 int pc_delta = pc_offset - prev_pc_offset;
// Small pc deltas use the short reloc encoding; the else-branch (large
// delta accounting) is missing from this extraction.
66 if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
71 prev_pc_offset = pc_offset;
// Grow the reloc info only if the existing array is below the minimum.
77 int reloc_length = code->relocation_info()->length();
78 if (min_reloc_size > reloc_length) {
79 int comment_reloc_size = RelocInfo::kMinRelocCommentSize;
81 int min_padding = min_reloc_size - reloc_length;
// Round the shortfall up to a whole number of filler-comment entries.
83 int additional_comments =
84 (min_padding + comment_reloc_size - 1) / comment_reloc_size;
86 int padding = additional_comments * comment_reloc_size;
// Allocate a larger (tenured) ByteArray; old reloc data is copied to the
// tail so the padding area sits at the front.
90 Factory* factory = isolate->factory();
91 Handle<ByteArray> new_reloc =
92 factory->NewByteArray(reloc_length + padding,
TENURED);
93 memcpy(new_reloc->GetDataStartAddress() + padding,
94 code->relocation_info()->GetDataStartAddress(),
// Fill the padding area with COMMENT reloc entries pointing at the
// designated filler-comment string.
98 RelocInfoWriter reloc_info_writer(
99 new_reloc->GetDataStartAddress() + padding, 0);
100 intptr_t comment_string
101 =
reinterpret_cast<intptr_t
>(RelocInfo::kFillerCommentString);
102 RelocInfo rinfo(0, RelocInfo::COMMENT, comment_string,
NULL);
103 for (
int i = 0; i < additional_comments; ++i) {
105 byte* pos_before = reloc_info_writer.pos();
107 reloc_info_writer.Write(&rinfo);
// Each filler comment must encode to exactly kMinRelocCommentSize bytes,
// otherwise the padding arithmetic above is wrong.
108 ASSERT(RelocInfo::kMinRelocCommentSize ==
109 pos_before - reloc_info_writer.pos());
// Install the enlarged reloc array on the code object.
112 code->set_relocation_info(*new_reloc);
// ---- Deoptimizer::DeoptimizeFunction (body fragment; signature and many
// interior lines missing from this garbled extraction). Visible behavior:
// patch every deopt call site in the function's optimized code, shrink the
// reloc info accordingly, record the code on the deoptimizing-code list and
// invalidate it for the GC.
// Nothing to do if the function is not running optimized code.
118 if (!function->IsOptimized())
return;
// Drop cached optimized code so this function is not re-entered optimized.
122 function->shared()->ClearOptimizedCodeMap();
124 Isolate* isolate =
function->GetIsolate();
125 HandleScope scope(isolate);
// Raw pointers below must not be invalidated by GC while we patch.
126 AssertNoAllocation no_allocation;
129 Code* code =
function->code();
130 Address code_start_address = code->instruction_start();
// Reloc info is rewritten backwards from its end toward the code start.
136 ByteArray* reloc_info = code->relocation_info();
137 Address reloc_end_address = reloc_info->address() + reloc_info->Size();
138 RelocInfoWriter reloc_info_writer(reloc_end_address, code_start_address);
148 DeoptimizationInputData* deopt_data =
153 for (
int i = 0; i < deopt_data->DeoptCount(); i++) {
// Entries with pc == -1 have no call site to patch.
154 if (deopt_data->Pc(i)->value() == -1)
continue;
// Patch the call at this site to target the deopt entry (patch details
// elided in this extraction).
156 Address call_address = code_start_address + deopt_data->Pc(i)->value();
157 CodePatcher patcher(call_address,
patch_size());
161 RelocInfo rinfo(call_address + 1,
163 reinterpret_cast<intptr_t>(deopt_entry),
165 reloc_info_writer.Write(&rinfo);
// Call sites must be at least patch_size() apart so patches don't overlap
// (ASSERT head elided here).
169 call_address >= prev_call_address +
patch_size());
172 prev_call_address = call_address;
// Move the rewritten (smaller) reloc info to the front of the array and
// shrink its recorded length.
177 int new_reloc_size = reloc_end_address - reloc_info_writer.pos();
178 memmove(code->relocation_start(), reloc_info_writer.pos(), new_reloc_size);
181 reloc_info->set_length(new_reloc_size);
// Turn the now-unused tail of the reloc array into a heap filler so the GC
// sees a valid object layout.
185 Address junk_address = reloc_info->address() + reloc_info->Size();
186 ASSERT(junk_address <= reloc_end_address);
187 isolate->heap()->CreateFillerObjectAt(junk_address,
188 reloc_end_address - junk_address);
// Prepend this code object to the isolate's deoptimizing-code list
// (node allocation elided in this extraction).
192 DeoptimizerData* data = isolate->deoptimizer_data();
193 node->set_next(data->deoptimizing_code_list_);
194 data->deoptimizing_code_list_ = node;
// Tell the mark-compact collector this code is no longer valid to execute.
199 isolate->heap()->mark_compact_collector()->InvalidateCode(code);
203 if (FLAG_trace_deopt) {
204 PrintF(
"[forced deoptimization: ");
205 function->PrintName();
206 PrintF(
" / %x]\n", reinterpret_cast<uint32_t>(
function));
211 static const byte kJnsInstruction = 0x79;
212 static const byte kJnsOffset = 0x13;
213 static const byte kJaeInstruction = 0x73;
214 static const byte kJaeOffset = 0x07;
215 static const byte kCallInstruction = 0xe8;
216 static const byte kNopByteOne = 0x66;
217 static const byte kNopByteTwo = 0x90;
// ---- Deoptimizer::PatchStackCheckCodeAt (tail of the parameter list plus
// body fragment). Visible behavior: verify the expected jump bytes before
// the call site, overwrite the jump with a two-byte nop, retarget the call
// to the replacement code, and notify incremental marking of the patch.
223 Code* replacement_code) {
// The byte layout before the call differs by interrupt flavor: jns for
// count-based interrupts, jae for the stack-limit check.
244 if (FLAG_count_based_interrupts) {
245 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
246 ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
248 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
249 ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
251 ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
// Replace the two-byte conditional jump with nop so the call is always
// taken (call retargeting line elided in this extraction).
252 *(call_target_address - 3) = kNopByteOne;
253 *(call_target_address - 2) = kNopByteTwo;
255 replacement_code->entry());
// Keep the incremental marker consistent with the new code target.
257 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
258 unoptimized_code, call_target_address, replacement_code);
// ---- Deoptimizer::RevertStackCheckCodeAt (tail of the parameter list plus
// body fragment). Inverse of PatchStackCheckCodeAt: verify the nop bytes,
// restore the original conditional jump, retarget the call back to the
// check code, and notify incremental marking.
265 Code* replacement_code) {
272 ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
273 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
274 ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
// Restore the jump flavor matching the interrupt mechanism in use.
275 if (FLAG_count_based_interrupts) {
276 *(call_target_address - 3) = kJnsInstruction;
277 *(call_target_address - 2) = kJnsOffset;
279 *(call_target_address - 3) = kJaeInstruction;
280 *(call_target_address - 2) = kJaeOffset;
283 check_code->entry());
// Keep the incremental marker consistent with the restored code target.
285 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
286 unoptimized_code, call_target_address, check_code);
290 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
291 ByteArray* translations = data->TranslationByteArray();
292 int length = data->DeoptCount();
293 for (
int i = 0; i < length; i++) {
294 if (data->AstId(i) == ast_id) {
295 TranslationIterator it(translations, data->TranslationIndex(i)->value());
296 int value = it.Next();
297 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
300 if (value == 1)
return i;
// ---- Deoptimizer::DoComputeOsrOutputFrame (heavily elided body). Builds
// the single unoptimized output frame used to resume execution after
// on-stack replacement. Many interior lines (frame allocation, translation
// of individual slots, trace output arguments) are missing from this
// extraction; code lines are preserved byte-for-byte.
308 void Deoptimizer::DoComputeOsrOutputFrame() {
310 optimized_code_->deoptimization_data());
311 unsigned ast_id = data->OsrAstId()->value();
314 ASSERT(bailout_id_ == ast_id);
// Locate the single-frame translation for the OSR entry point.
316 int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
317 unsigned translation_index = data->TranslationIndex(bailout_id)->value();
318 ByteArray* translations = data->TranslationByteArray();
320 TranslationIterator iterator(translations, translation_index);
323 ASSERT(Translation::BEGIN == opcode);
325 int count = iterator.Next();
// OSR expects exactly one JS frame whose node id matches the OSR ast id
// and whose closure is the function itself.
332 ASSERT(Translation::JS_FRAME == opcode);
333 unsigned node_id = iterator.Next();
335 ASSERT(node_id == ast_id);
336 int closure_id = iterator.Next();
338 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
339 unsigned height = iterator.Next();
341 USE(height_in_bytes);
// The unoptimized frame's size is the fixed part plus expression-stack
// height; at an OSR loop entry there are no outgoing arguments.
343 unsigned fixed_size = ComputeFixedSize(function_);
345 ASSERT(fixed_size + height_in_bytes == input_frame_size);
348 unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
349 unsigned outgoing_size = outgoing_height *
kPointerSize;
350 unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
351 ASSERT(outgoing_size == 0);
353 if (FLAG_trace_osr) {
355 reinterpret_cast<intptr_t>(function_));
357 PrintF(
" => node=%u, frame=%d->%d, ebp:esp=0x%08x:0x%08x]\n",
369 output_frame_size, function_);
// Translate the incoming parameters (receiver included, hence +1).
375 int parameter_count = function_->shared()->formal_parameter_count() + 1;
376 for (
int i = 0; i < parameter_count; ++i) {
// Translate the fixed part of the frame down to the parameters.
385 int limit = input_offset - (parameter_count *
kPointerSize);
386 while (ok && input_offset > limit) {
387 ok = DoOsrTranslateCommand(&iterator, &input_offset);
395 uint32_t input_value = input_->
GetFrameSlot(input_offset);
396 if (FLAG_trace_osr) {
// Human-readable slot names for the trace output only.
397 const char* name =
"UNKNOWN";
400 name =
"caller's pc";
412 PrintF(
" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
// Record whether the caller inserted ia32 stack-alignment padding; the
// dynamic alignment state slot lets the deopt entry undo it.
425 if ((frame_pointer & kPointerSize) != 0) {
427 has_alignment_padding_ = 1;
430 int32_t alignment_state = (has_alignment_padding_ == 1) ?
433 if (FLAG_trace_osr) {
434 PrintF(
" [sp + %d] <- 0x%08x ; (alignment state)\n",
438 output_[0]->
SetFrameSlot(output_offset, alignment_state);
// Translate the remaining (expression-stack) slots.
442 while (ok && input_offset >= 0) {
443 ok = DoOsrTranslateCommand(&iterator, &input_offset);
// On translation failure, resume at the original (pre-OSR) pc.
450 output_[0]->
SetPc(reinterpret_cast<uint32_t>(from_));
// On success, resume inside the optimized code at the OSR entry pc.
456 unsigned pc_offset = data->OsrPcOffset()->value();
457 uint32_t
pc =
reinterpret_cast<uint32_t
>(
458 optimized_code_->
entry() + pc_offset);
459 output_[0]->
SetPc(pc);
464 reinterpret_cast<uint32_t>(continuation->entry()));
466 if (FLAG_trace_osr) {
468 ok ?
"finished" :
"aborted",
469 reinterpret_cast<intptr_t>(function_));
471 PrintF(
" => pc=0x%0x]\n", output_[0]->GetPc());
// ---- Deoptimizer::DoComputeArgumentsAdaptorFrame (elided body). Builds a
// synthetic arguments-adaptor frame in the output frame array: translated
// arguments, caller pc/fp, adaptor-sentinel context, function, argc, and a
// pc inside the ArgumentsAdaptorTrampoline builtin. Several lines (frame
// allocation, offset decrements) are missing from this extraction.
476 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
479 unsigned height = iterator->Next();
481 if (FLAG_trace_deopt) {
482 PrintF(
" translating arguments adaptor => height=%d\n", height_in_bytes);
486 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Adaptor frames are never the outermost or topmost output frame.
494 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
496 output_[frame_index] = output_frame;
// The frame's top is directly below the previous (callee-ward) frame.
500 uint32_t top_address;
501 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
502 output_frame->SetTop(top_address);
// Translate the incoming arguments into the top of the frame.
505 int parameter_count = height;
506 unsigned output_offset = output_frame_size;
507 for (
int i = 0; i < parameter_count; ++i) {
509 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc, read from the previously-built frame.
514 intptr_t callers_pc = output_[frame_index - 1]->
GetPc();
515 output_frame->SetFrameSlot(output_offset, callers_pc);
516 if (FLAG_trace_deopt) {
517 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
518 top_address + output_offset, output_offset, callers_pc);
// Caller's fp; also becomes this frame's fp.
523 intptr_t value = output_[frame_index - 1]->
GetFp();
524 output_frame->SetFrameSlot(output_offset, value);
526 output_frame->SetFp(fp_value);
527 if (FLAG_trace_deopt) {
528 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
529 fp_value, output_offset, value);
// The context slot of an adaptor frame holds a sentinel value
// (the cast target is elided in this extraction).
534 intptr_t context =
reinterpret_cast<intptr_t
>(
536 output_frame->SetFrameSlot(output_offset, context);
537 if (FLAG_trace_deopt) {
538 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
539 top_address + output_offset, output_offset, context);
// The function being adapted.
544 value =
reinterpret_cast<intptr_t
>(
function);
545 output_frame->SetFrameSlot(output_offset, value);
546 if (FLAG_trace_deopt) {
547 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function\n",
548 top_address + output_offset, output_offset, value);
// Argument count as a Smi; height includes the receiver, hence -1.
553 value =
reinterpret_cast<uint32_t
>(
Smi::FromInt(height - 1));
554 output_frame->SetFrameSlot(output_offset, value);
555 if (FLAG_trace_deopt) {
556 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
557 top_address + output_offset, output_offset, value, height - 1);
560 ASSERT(0 == output_offset);
// Resume inside the adaptor trampoline at its recorded deopt pc offset.
562 Builtins* builtins = isolate_->
builtins();
563 Code* adaptor_trampoline =
564 builtins->
builtin(Builtins::kArgumentsAdaptorTrampoline);
565 uint32_t pc =
reinterpret_cast<uint32_t
>(
566 adaptor_trampoline->instruction_start() +
567 isolate_->
heap()->arguments_adaptor_deopt_pc_offset()->value());
568 output_frame->SetPc(pc);
// ---- Deoptimizer::DoComputeConstructStubFrame (elided body). Builds a
// synthetic construct-stub frame: translated arguments, caller pc/fp,
// context, CONSTRUCT sentinel, the construct stub code object, argc, the
// allocated receiver, and a pc inside kJSConstructStubGeneric. Several
// lines (frame allocation, offset decrements) are missing here.
572 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
574 Builtins* builtins = isolate_->
builtins();
575 Code* construct_stub = builtins->
builtin(Builtins::kJSConstructStubGeneric);
577 unsigned height = iterator->Next();
579 if (FLAG_trace_deopt) {
580 PrintF(
" translating construct stub => height=%d\n", height_in_bytes);
584 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
589 output_frame->SetFrameType(StackFrame::CONSTRUCT);
// Construct frames are never the outermost or topmost output frame.
592 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
594 output_[frame_index] = output_frame;
// The frame's top is directly below the previous frame.
598 uint32_t top_address;
599 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
600 output_frame->SetTop(top_address);
// Translate the constructor call's arguments.
603 int parameter_count = height;
604 unsigned output_offset = output_frame_size;
605 for (
int i = 0; i < parameter_count; ++i) {
607 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc from the previously-built frame.
612 intptr_t callers_pc = output_[frame_index - 1]->
GetPc();
613 output_frame->SetFrameSlot(output_offset, callers_pc);
614 if (FLAG_trace_deopt) {
615 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
616 top_address + output_offset, output_offset, callers_pc);
// Caller's fp; also becomes this frame's fp.
621 intptr_t value = output_[frame_index - 1]->
GetFp();
622 output_frame->SetFrameSlot(output_offset, value);
624 output_frame->SetFp(fp_value);
625 if (FLAG_trace_deopt) {
626 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
627 fp_value, output_offset, value);
// Context is inherited from the previous frame.
632 value = output_[frame_index - 1]->
GetContext();
633 output_frame->SetFrameSlot(output_offset, value);
634 if (FLAG_trace_deopt) {
635 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context\n",
636 top_address + output_offset, output_offset, value);
// Frame-type marker: the CONSTRUCT sentinel Smi in the function slot.
641 value =
reinterpret_cast<intptr_t
>(
Smi::FromInt(StackFrame::CONSTRUCT));
642 output_frame->SetFrameSlot(output_offset, value);
643 if (FLAG_trace_deopt) {
644 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
645 top_address + output_offset, output_offset, value);
// The construct stub code object itself.
650 value =
reinterpret_cast<intptr_t
>(construct_stub);
651 output_frame->SetFrameSlot(output_offset, value);
652 if (FLAG_trace_deopt) {
653 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; code object\n",
654 top_address + output_offset, output_offset, value);
// Argument count as a Smi; height includes the receiver, hence -1.
659 value =
reinterpret_cast<uint32_t
>(
Smi::FromInt(height - 1));
660 output_frame->SetFrameSlot(output_offset, value);
661 if (FLAG_trace_deopt) {
662 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
663 top_address + output_offset, output_offset, value, height - 1);
// The newly-allocated receiver, copied from the top-of-frame slot where
// the translated arguments put it.
669 value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
670 output_frame->SetFrameSlot(output_offset, value);
671 if (FLAG_trace_deopt) {
672 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
673 top_address + output_offset, output_offset, value);
676 ASSERT(0 == output_offset);
// Resume inside the construct stub at its recorded deopt pc offset.
678 uint32_t pc =
reinterpret_cast<uint32_t
>(
679 construct_stub->instruction_start() +
680 isolate_->
heap()->construct_stub_deopt_pc_offset()->value());
681 output_frame->SetPc(pc);
// ---- Deoptimizer::DoComputeAccessorStubFrame (elided body). Builds a
// synthetic frame for a getter/setter IC stub (kLoadIC_Getter_ForDeopt or
// kStoreIC_Setter_ForDeopt): caller pc/fp, context, sentinel, stub code
// object, and for setters the value being stored. Several lines (frame
// allocation, offset decrements, PrintF heads) are missing here.
685 void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
687 bool is_setter_stub_frame) {
// "setter"/"getter" only feeds trace output.
694 const char* kind = is_setter_stub_frame ?
"setter" :
"getter";
695 if (FLAG_trace_deopt) {
696 PrintF(
" translating %s stub => height=%u\n", kind, height_in_bytes);
// Fixed entries: code object + four frame slots, plus the stored value
// slot for setters (per the comment structure implied by "1 + 4 + ...").
704 unsigned fixed_frame_entries = 1 + 4 + (is_setter_stub_frame ? 1 : 0);
705 unsigned fixed_frame_size = fixed_frame_entries *
kPointerSize;
706 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Accessor frames are never the outermost or topmost output frame.
714 ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
716 output_[frame_index] = output_frame;
720 intptr_t top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
721 output_frame->SetTop(top_address);
723 unsigned output_offset = output_frame_size;
// Caller's pc from the previously-built frame.
727 intptr_t callers_pc = output_[frame_index - 1]->
GetPc();
728 output_frame->SetFrameSlot(output_offset, callers_pc);
729 if (FLAG_trace_deopt) {
732 top_address + output_offset, output_offset, callers_pc);
// Caller's fp; also becomes this frame's fp.
737 intptr_t value = output_[frame_index - 1]->
GetFp();
738 output_frame->SetFrameSlot(output_offset, value);
740 output_frame->SetFp(fp_value);
741 if (FLAG_trace_deopt) {
744 fp_value, output_offset, value);
// Context is inherited from the previous frame.
749 value = output_[frame_index - 1]->
GetContext();
750 output_frame->SetFrameSlot(output_offset, value);
751 if (FLAG_trace_deopt) {
754 top_address + output_offset, output_offset, value);
// Frame marker sentinel in the function slot (value computed on an
// elided line).
760 output_frame->SetFrameSlot(output_offset, value);
761 if (FLAG_trace_deopt) {
763 " ; function (%s sentinel)\n",
764 top_address + output_offset, output_offset, value, kind);
// Pick the deopt-aware accessor stub matching the frame kind.
770 Builtins::kStoreIC_Setter_ForDeopt :
771 Builtins::kLoadIC_Getter_ForDeopt;
773 value =
reinterpret_cast<intptr_t
>(accessor_stub);
774 output_frame->SetFrameSlot(output_offset, value);
775 if (FLAG_trace_deopt) {
778 top_address + output_offset, output_offset, value);
// Skip the accessor's translation command; only setters carry a value
// to translate into the frame.
784 iterator->Skip(Translation::NumberOfOperandsFor(opcode));
786 if (is_setter_stub_frame) {
790 DoTranslateCommand(iterator, frame_index, output_offset);
793 ASSERT(0 == output_offset);
// Resume inside the accessor stub at its recorded deopt pc offset.
795 Smi* offset = is_setter_stub_frame ?
796 isolate_->
heap()->setter_stub_deopt_pc_offset() :
797 isolate_->
heap()->getter_stub_deopt_pc_offset();
798 intptr_t pc =
reinterpret_cast<intptr_t
>(
799 accessor_stub->instruction_start() + offset->value());
800 output_frame->SetPc(pc);
// ---- Deoptimizer::DoComputeJSFrame (heavily elided body). Builds one
// unoptimized JavaScript frame: translated parameters, caller pc/fp,
// context, function, translated locals/expression stack, final pc from the
// deoptimization output table, and (for the topmost frame) registers and
// the Notify(Lazy)Deoptimized continuation. Many interior lines are missing
// from this extraction; code lines preserved byte-for-byte.
804 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
806 BailoutId node_id = BailoutId(iterator->Next());
807 JSFunction*
function;
// Non-bottommost frames read their closure from the translation; the
// bottommost frame is the function being deoptimized itself.
808 if (frame_index != 0) {
811 int closure_id = iterator->Next();
813 ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
814 function = function_;
816 unsigned height = iterator->Next();
818 if (FLAG_trace_deopt) {
820 function->PrintName();
821 PrintF(
" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
826 unsigned fixed_frame_size = ComputeFixedSize(
function);
828 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
833 output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
835 bool is_bottommost = (0 == frame_index);
836 bool is_topmost = (output_count_ - 1 == frame_index);
837 ASSERT(frame_index >= 0 && frame_index < output_count_);
839 output_[frame_index] = output_frame;
// Receiver counts as a parameter, hence +1.
842 int parameter_count =
function->shared()->formal_parameter_count() + 1;
843 unsigned output_offset = output_frame_size;
844 unsigned input_offset = input_frame_size;
// ia32 dynamic frame alignment: locate the alignment-state slot in the
// input frame and account for possible padding in the output top.
846 unsigned alignment_state_offset =
847 input_offset - parameter_count * kPointerSize -
857 uint32_t top_address;
860 has_alignment_padding_ =
867 height_in_bytes + has_alignment_padding_ *
kPointerSize;
869 top_address = output_[frame_index - 1]->
GetTop() - output_frame_size;
871 output_frame->SetTop(top_address);
// Translate the parameters (including receiver).
873 for (
int i = 0; i < parameter_count; ++i) {
875 DoTranslateCommand(iterator, frame_index, output_offset);
// Caller's pc from the previously-built frame (bottommost branch elided).
893 value = output_[frame_index - 1]->
GetPc();
895 output_frame->SetFrameSlot(output_offset, value);
896 if (FLAG_trace_deopt) {
897 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
898 top_address + output_offset, output_offset, value);
// Caller's fp; also becomes this frame's fp, and for the topmost frame
// is materialized into ebp on resume.
910 value = output_[frame_index - 1]->
GetFp();
912 output_frame->SetFrameSlot(output_offset, value);
917 output_frame->SetFp(fp_value);
918 if (is_topmost) output_frame->SetRegister(
ebp.
code(), fp_value);
919 if (FLAG_trace_deopt) {
920 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
921 fp_value, output_offset, value);
// With alignment padding, the bottommost fp must be misaligned by one
// word so the padding cancels out.
923 ASSERT(!is_bottommost || !has_alignment_padding_ ||
924 (fp_value & kPointerSize) != 0);
// Context slot; for the topmost frame also materialized into esi.
934 value =
reinterpret_cast<uint32_t
>(
function->context());
936 output_frame->SetFrameSlot(output_offset, value);
937 output_frame->SetContext(value);
938 if (is_topmost) output_frame->SetRegister(
esi.
code(), value);
939 if (FLAG_trace_deopt) {
940 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; context\n",
941 top_address + output_offset, output_offset, value);
// The function slot.
947 value =
reinterpret_cast<uint32_t
>(
function);
951 output_frame->SetFrameSlot(output_offset, value);
952 if (FLAG_trace_deopt) {
953 PrintF(
" 0x%08x: [top + %d] <- 0x%08x ; function\n",
954 top_address + output_offset, output_offset, value);
// Translate locals and expression-stack slots.
958 for (
unsigned i = 0; i < height; ++i) {
960 DoTranslateCommand(iterator, frame_index, output_offset);
962 ASSERT(0 == output_offset);
// Compute the resume pc from the unoptimized code's deoptimization
// output table (pc_offset/state decode lines elided).
965 Code* non_optimized_code =
function->shared()->code();
966 FixedArray* raw_data = non_optimized_code->deoptimization_data();
968 Address start = non_optimized_code->instruction_start();
969 unsigned pc_and_state =
GetOutputInfo(data, node_id, function->shared());
971 uint32_t pc_value =
reinterpret_cast<uint32_t
>(start + pc_offset);
972 output_frame->SetPc(pc_value);
// The topmost frame (except under the debugger) continues in the
// matching Notify*Deoptimized builtin.
979 if (is_topmost && bailout_type_ !=
DEBUGGER) {
980 Builtins* builtins = isolate_->
builtins();
981 Code* continuation = (bailout_type_ ==
EAGER)
982 ? builtins->
builtin(Builtins::kNotifyDeoptimized)
983 : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
984 output_frame->SetContinuation(
985 reinterpret_cast<uint32_t>(continuation->entry()));
// ---- Deoptimizer::FillInputFrame — only the signature survives in this
// extraction; the body (which presumably captures the optimized frame's
// registers and slots into input_ — TODO confirm against the full source)
// is missing.
990 void Deoptimizer::FillInputFrame(
Address tos, JavaScriptFrame* frame) {
// ---- Deoptimizer::EntryGenerator::Generate (heavily elided). Emits the
// ia32 deoptimization entry stub: save registers and XMM registers, call
// the C++ Deoptimizer constructor, copy the frame into the input frame,
// call ComputeOutputFrames, then rebuild the output frames on the stack.
// Many instructions between the visible lines are missing; code lines are
// preserved byte-for-byte.
1013 void Deoptimizer::EntryGenerator::Generate() {
1015 CpuFeatures::Scope scope(
SSE2);
1017 Isolate* isolate = masm()->isolate();
// Reserve space for the double (XMM) registers and spill each one.
1024 __ sub(
esp, Immediate(kDoubleRegsSize));
1028 __ movdbl(Operand(
esp, offset), xmm_reg);
1033 const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
// ebx <- the deoptimization bailout id passed above the saved registers.
1037 __ mov(
ebx, Operand(
esp, kSavedRegistersAreaSize));
// EAGER deopts carry no return-address slot; LAZY/OSR carry one extra
// word, shifting where fp-to-sp delta and return address live.
1041 if (type() == EAGER) {
1042 __ Set(
ecx, Immediate(0));
1043 __ lea(
edx, Operand(
esp, kSavedRegistersAreaSize + 1 * kPointerSize));
1045 __ mov(
ecx, Operand(
esp, kSavedRegistersAreaSize + 1 * kPointerSize));
1046 __ lea(
edx, Operand(
esp, kSavedRegistersAreaSize + 2 * kPointerSize));
// Call Deoptimizer::New(function, type, bailout_id, return_address,
// fp_to_sp_delta, isolate) — six stack arguments.
1052 __ PrepareCallCFunction(6,
eax);
1054 __ mov(Operand(
esp, 0 * kPointerSize),
eax);
1055 __ mov(Operand(
esp, 1 * kPointerSize), Immediate(type()));
1056 __ mov(Operand(
esp, 2 * kPointerSize),
ebx);
1057 __ mov(Operand(
esp, 3 * kPointerSize),
ecx);
1058 __ mov(Operand(
esp, 4 * kPointerSize),
edx);
1059 __ mov(Operand(
esp, 5 * kPointerSize),
1060 Immediate(ExternalReference::isolate_address()));
1062 AllowExternalCallThatCantCauseGC scope(masm());
1063 __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
// Pop the saved general registers into the input FrameDescription.
1071 for (
int i = kNumberOfRegisters - 1; i >= 0; i--) {
1073 __ pop(Operand(
ebx, offset));
// Copy the spilled XMM registers into the input frame via xmm0.
1079 int dst_offset = i * kDoubleSize + double_regs_offset;
1081 __ movdbl(
xmm0, Operand(
esp, src_offset));
1082 __ movdbl(Operand(
ebx, dst_offset),
xmm0);
// Discard the double-register area plus the bailout-id (and, for
// non-EAGER, the return-address) words.
1086 if (type() == EAGER) {
1087 __ add(
esp, Immediate(kDoubleRegsSize + kPointerSize));
1089 __ add(
esp, Immediate(kDoubleRegsSize + 2 * kPointerSize));
// Copy the remaining frame contents word-by-word into the input frame
// (loop head elided; edx walks the destination).
1103 __ pop(Operand(
edx, 0));
1104 __ add(
edx, Immediate(
sizeof(uint32_t)));
// Call Deoptimizer::ComputeOutputFrames(deoptimizer) — one argument.
1110 __ PrepareCallCFunction(1,
ebx);
1111 __ mov(Operand(
esp, 0 * kPointerSize),
eax);
1113 AllowExternalCallThatCantCauseGC scope(masm());
1115 ExternalReference::compute_output_frames_function(isolate), 1);
// Non-OSR: possibly undo dynamic alignment padding, verified under
// --debug-code via the alignment marker.
1119 if (type() != OSR) {
1126 if (FLAG_debug_code) {
1128 __ Assert(
equal,
"alignment marker expected");
1130 __ bind(&no_padding);
1138 __ bind(&no_padding);
// Push the output frames: outer loop over frames, inner loop over the
// words of each frame, top to bottom.
1142 Label outer_push_loop, inner_push_loop;
1148 __ bind(&outer_push_loop);
1152 __ bind(&inner_push_loop);
1153 __ sub(
ecx, Immediate(
sizeof(uint32_t)));
1157 __ add(
eax, Immediate(kPointerSize));
1159 __ j(
below, &outer_push_loop);
// OSR resumes in optimized code, so restore the XMM registers.
1162 if (type() == OSR) {
1165 int src_offset = i * kDoubleSize + double_regs_offset;
1166 __ movdbl(xmm_reg, Operand(
ebx, src_offset));
1171 if (type() != OSR) {
// Push the register values recorded in the topmost output frame so a
// final popad-style sequence (elided) can restore them before returning.
1179 for (
int i = 0; i < kNumberOfRegisters; i++) {
1181 __ push(Operand(
ebx, offset));
// ---- Deoptimizer::TableEntryGenerator::GeneratePrologue (fragment).
// Emits one fixed-size jump-table entry per bailout id; the ASSERT checks
// each entry occupies exactly table_entry_size_ bytes (declared as 10 at
// the top of this file). Entry-emission instructions are elided here.
1195 for (
int i = 0; i < count(); i++) {
1196 int start = masm()->pc_offset();
1200 ASSERT(masm()->pc_offset() - start == table_entry_size_);
1210 #endif // V8_TARGET_ARCH_IA32
static int registers_offset()
static const int kCallerFPOffset
Code * builtin(Name name)
static DeoptimizationOutputData * cast(Object *obj)
void PrintF(const char *format,...)
static Smi * FromInt(int value)
void SetFrameSlot(unsigned offset, intptr_t value)
static XMMRegister FromAllocationIndex(int index)
static const int kNumAllocatableRegisters
const int kNoAlignmentPadding
static const int kCallInstructionLength
#define ASSERT(condition)
static void DeoptimizeFunction(JSFunction *function)
#define ASSERT_GE(v1, v2)
intptr_t GetContext() const
void SetFrameType(StackFrame::Type type)
const int kAlignmentPaddingPushed
static const int kNumRegisters
static int double_registers_offset()
static int frame_content_offset()
static int output_offset()
static void set_target_address_at(Address pc, Address target)
static int state_offset()
void SetRegister(unsigned n, intptr_t value)
static unsigned decode(uint32_t value)
static const int kDynamicAlignmentStateOffset
friend class DeoptimizingCodeListNode
const int kAlignmentZapValue
static const int kFixedFrameSize
static void EnsureRelocSpaceForLazyDeoptimization(Handle< Code > code)
static int GetOutputInfo(DeoptimizationOutputData *data, BailoutId node_id, SharedFunctionInfo *shared)
static void ReplaceCodeForRelatedFunctions(JSFunction *function, Code *code)
uint32_t GetFrameSize() const
void SetContinuation(intptr_t pc)
static int frame_size_offset()
static int output_count_offset()
static const int kHeaderSize
static const int kNumAllocatableRegisters
static void RevertStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static Address target_address_at(Address pc)
intptr_t GetFrameSlot(unsigned offset)
static Address GetDeoptimizationEntry(int id, BailoutType type)
static const int kContextOffset
static const int kFunctionOffset
static const int kCallerPCOffset
#define ASSERT_EQ(v1, v2)
friend class FrameDescription
virtual void GeneratePrologue()
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static uint32_t & uint32_at(Address addr)
#define RUNTIME_ENTRY(name, nargs, ressize)
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static const int kFrameSize
intptr_t GetRegister(unsigned n) const
static const int kMarkerOffset
void SetDoubleRegister(unsigned n, double value)
static const int kLocal0Offset
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
static int continuation_offset()
static int has_alignment_padding_offset()
static int input_offset()
static JSFunction * cast(Object *obj)