v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
deoptimizer.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen.h"
31 #include "deoptimizer.h"
32 #include "disasm.h"
33 #include "full-codegen.h"
34 #include "global-handles.h"
35 #include "macro-assembler.h"
36 #include "prettyprinter.h"
37 
38 
39 namespace v8 {
40 namespace internal {
41 
42 DeoptimizerData::DeoptimizerData() {
43  eager_deoptimization_entry_code_ = NULL;
44  lazy_deoptimization_entry_code_ = NULL;
45  current_ = NULL;
46  deoptimizing_code_list_ = NULL;
47 #ifdef ENABLE_DEBUGGER_SUPPORT
48  deoptimized_frame_info_ = NULL;
49 #endif
50 }
51 
52 
53 DeoptimizerData::~DeoptimizerData() {
54  if (eager_deoptimization_entry_code_ != NULL) {
55  Isolate::Current()->memory_allocator()->Free(
56  eager_deoptimization_entry_code_);
57  eager_deoptimization_entry_code_ = NULL;
58  }
59  if (lazy_deoptimization_entry_code_ != NULL) {
60  Isolate::Current()->memory_allocator()->Free(
61  lazy_deoptimization_entry_code_);
62  lazy_deoptimization_entry_code_ = NULL;
63  }
64 }
65 
66 
67 #ifdef ENABLE_DEBUGGER_SUPPORT
68 void DeoptimizerData::Iterate(ObjectVisitor* v) {
69  if (deoptimized_frame_info_ != NULL) {
70  deoptimized_frame_info_->Iterate(v);
71  }
72 }
73 #endif
74 
75 
76 // We rely on this function not causing a GC. It is called from generated code
77 // without having a real stack frame in place.
78 Deoptimizer* Deoptimizer::New(JSFunction* function,
79  BailoutType type,
80  unsigned bailout_id,
81  Address from,
82  int fp_to_sp_delta,
83  Isolate* isolate) {
84  ASSERT(isolate == Isolate::Current());
85  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
86  function,
87  type,
88  bailout_id,
89  from,
90  fp_to_sp_delta,
91  NULL);
92  ASSERT(isolate->deoptimizer_data()->current_ == NULL);
93  isolate->deoptimizer_data()->current_ = deoptimizer;
94  return deoptimizer;
95 }
96 
97 
98 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
99  ASSERT(isolate == Isolate::Current());
100  Deoptimizer* result = isolate->deoptimizer_data()->current_;
101  ASSERT(result != NULL);
102  result->DeleteFrameDescriptions();
103  isolate->deoptimizer_data()->current_ = NULL;
104  return result;
105 }
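// Editor's note (not part of the original listing): New() and Grab() form a
// hand-off pair. A rough sketch of the sequence, using only functions defined
// in this file; the real flow is driven by the architecture-specific deopt
// entry stubs and the runtime, so treat this as illustration, not as the
// exact call sites:
//
//   Deoptimizer* d = Deoptimizer::New(function, Deoptimizer::EAGER,
//                                     bailout_id, from, fp_to_sp_delta,
//                                     isolate);        // no GC allowed here
//   Deoptimizer::ComputeOutputFrames(d);               // build unoptimized frames
//   ...
//   Deoptimizer* same = Deoptimizer::Grab(isolate);    // same == d, current_ cleared,
//                                                      // frame descriptions freed
//   delete same;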
106 
107 
108 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
109  if (jsframe_index == 0) return 0;
110 
111  int frame_index = 0;
112  while (jsframe_index >= 0) {
113  FrameDescription* frame = output_[frame_index];
114  if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
115  jsframe_index--;
116  }
117  frame_index++;
118  }
119 
120  return frame_index - 1;
121 }
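// Editor's note (not part of the original listing): worked example for the
// conversion above. If the output frames are
//   output_[0] = JAVA_SCRIPT, output_[1] = ARGUMENTS_ADAPTOR, output_[2] = JAVA_SCRIPT,
// then jsframe_index 1 maps to frame_index 2: the loop only counts down on
// JAVA_SCRIPT frames and walks straight past the adaptor frame.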
122 
123 
124 #ifdef ENABLE_DEBUGGER_SUPPORT
125 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
126  JavaScriptFrame* frame,
127  int jsframe_index,
128  Isolate* isolate) {
129  ASSERT(isolate == Isolate::Current());
130  ASSERT(frame->is_optimized());
131  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
132 
133  // Get the function and code from the frame.
134  JSFunction* function = JSFunction::cast(frame->function());
135  Code* code = frame->LookupCode();
136 
137  // Locate the deoptimization point in the code. As we are at a call the
138  // return address must be at a place in the code with deoptimization support.
139  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
140  int deoptimization_index = safepoint_entry.deoptimization_index();
141  ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);
142 
143  // Always use the actual stack slots when calculating the fp to sp
144  // delta, adding two for the function and context.
145  unsigned stack_slots = code->stack_slots();
146  unsigned fp_to_sp_delta = ((stack_slots + 2) * kPointerSize);
147 
148  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
149  function,
150  Deoptimizer::DEBUGGER,
151  deoptimization_index,
152  frame->pc(),
153  fp_to_sp_delta,
154  code);
155  Address tos = frame->fp() - fp_to_sp_delta;
156  deoptimizer->FillInputFrame(tos, frame);
157 
158  // Calculate the output frames.
159  Deoptimizer::ComputeOutputFrames(deoptimizer);
160 
161  // Create the GC safe output frame information and register it for GC
162  // handling.
163  ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());
164 
165  // Convert JS frame index into frame index.
166  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
167 
168  bool has_arguments_adaptor =
169  frame_index > 0 &&
170  deoptimizer->output_[frame_index - 1]->GetFrameType() ==
171  StackFrame::ARGUMENTS_ADAPTOR;
172 
173  int construct_offset = has_arguments_adaptor ? 2 : 1;
174  bool has_construct_stub =
175  frame_index >= construct_offset &&
176  deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
177  StackFrame::CONSTRUCT;
178 
179  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
180  frame_index,
181  has_arguments_adaptor,
182  has_construct_stub);
183  isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
184 
185  // Get the "simulated" top and size for the requested frame.
186  FrameDescription* parameters_frame =
187  deoptimizer->output_[
188  has_arguments_adaptor ? (frame_index - 1) : frame_index];
189 
190  uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
191  Address parameters_top = reinterpret_cast<Address>(
192  parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
193  parameters_size));
194 
195  uint32_t expressions_size = info->expression_count() * kPointerSize;
196  Address expressions_top = reinterpret_cast<Address>(
197  deoptimizer->output_[frame_index]->GetTop());
198 
199  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
200  deoptimizer->DeleteFrameDescriptions();
201 
202  // Allocate a heap number for the doubles belonging to this frame.
203  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
204  parameters_top, parameters_size, expressions_top, expressions_size, info);
205 
206  // Finished using the deoptimizer instance.
207  delete deoptimizer;
208 
209  return info;
210 }
211 
212 
213 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
214  Isolate* isolate) {
215  ASSERT(isolate == Isolate::Current());
216  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
217  delete info;
218  isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
219 }
220 #endif
221 
222 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
223  int count,
224  BailoutType type) {
225  TableEntryGenerator generator(masm, type, count);
226  generator.Generate();
227 }
228 
229 
230 class DeoptimizingVisitor : public OptimizedFunctionVisitor {
231  public:
232  virtual void EnterContext(Context* context) {
233  if (FLAG_trace_deopt) {
234  PrintF("[deoptimize context: %" V8PRIxPTR "]\n",
235  reinterpret_cast<intptr_t>(context));
236  }
237  }
238 
239  virtual void VisitFunction(JSFunction* function) {
240  Deoptimizer::DeoptimizeFunction(function);
241  }
242 
243  virtual void LeaveContext(Context* context) {
244  context->ClearOptimizedFunctions();
245  }
246 };
247 
248 
249 void Deoptimizer::DeoptimizeAll() {
250  AssertNoAllocation no_allocation;
251 
252  if (FLAG_trace_deopt) {
253  PrintF("[deoptimize all contexts]\n");
254  }
255 
256  DeoptimizingVisitor visitor;
257  VisitAllOptimizedFunctions(&visitor);
258 }
259 
260 
261 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
262  AssertNoAllocation no_allocation;
263 
264  DeoptimizingVisitor visitor;
265  VisitAllOptimizedFunctionsForGlobalObject(object, &visitor);
266 }
267 
268 
269 void Deoptimizer::VisitAllOptimizedFunctionsForContext(
270  Context* context, OptimizedFunctionVisitor* visitor) {
271  AssertNoAllocation no_allocation;
272 
273  ASSERT(context->IsGlobalContext());
274 
275  visitor->EnterContext(context);
276  // Run through the list of optimized functions and deoptimize them.
277  Object* element = context->OptimizedFunctionsListHead();
278  while (!element->IsUndefined()) {
279  JSFunction* element_function = JSFunction::cast(element);
280  // Get the next link before deoptimizing as deoptimizing will clear the
281  // next link.
282  element = element_function->next_function_link();
283  visitor->VisitFunction(element_function);
284  }
285  visitor->LeaveContext(context);
286 }
287 
288 
289 void Deoptimizer::VisitAllOptimizedFunctionsForGlobalObject(
290  JSObject* object, OptimizedFunctionVisitor* visitor) {
291  AssertNoAllocation no_allocation;
292 
293  if (object->IsJSGlobalProxy()) {
294  Object* proto = object->GetPrototype();
295  ASSERT(proto->IsJSGlobalObject());
296  VisitAllOptimizedFunctionsForContext(
297  GlobalObject::cast(proto)->global_context(), visitor);
298  } else if (object->IsGlobalObject()) {
299  VisitAllOptimizedFunctionsForContext(
300  GlobalObject::cast(object)->global_context(), visitor);
301  }
302 }
303 
304 
305 void Deoptimizer::VisitAllOptimizedFunctions(
306  OptimizedFunctionVisitor* visitor) {
307  AssertNoAllocation no_allocation;
308 
309  // Run through the list of all global contexts and deoptimize.
310  Object* context = Isolate::Current()->heap()->global_contexts_list();
311  while (!context->IsUndefined()) {
312  // GC can happen when the context is not fully initialized,
313  // so the global field of the context can be undefined.
314  Object* global = Context::cast(context)->get(Context::GLOBAL_INDEX);
315  if (!global->IsUndefined()) {
316  VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global),
317  visitor);
318  }
319  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
320  }
321 }
322 
323 
324 void Deoptimizer::HandleWeakDeoptimizedCode(
325  v8::Persistent<v8::Value> obj, void* data) {
326  DeoptimizingCodeListNode* node =
327  reinterpret_cast<DeoptimizingCodeListNode*>(data);
328  RemoveDeoptimizingCode(*node->code());
329 #ifdef DEBUG
330  node = Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_;
331  while (node != NULL) {
332  ASSERT(node != reinterpret_cast<DeoptimizingCodeListNode*>(data));
333  node = node->next();
334  }
335 #endif
336 }
337 
338 
339 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
340  deoptimizer->DoComputeOutputFrames();
341 }
342 
343 
344 Deoptimizer::Deoptimizer(Isolate* isolate,
345  JSFunction* function,
346  BailoutType type,
347  unsigned bailout_id,
348  Address from,
349  int fp_to_sp_delta,
350  Code* optimized_code)
351  : isolate_(isolate),
352  function_(function),
353  bailout_id_(bailout_id),
354  bailout_type_(type),
355  from_(from),
356  fp_to_sp_delta_(fp_to_sp_delta),
357  has_alignment_padding_(0),
358  input_(NULL),
359  output_count_(0),
360  jsframe_count_(0),
361  output_(NULL),
362  deferred_heap_numbers_(0) {
363  if (FLAG_trace_deopt && type != OSR) {
364  if (type == DEBUGGER) {
365  PrintF("**** DEOPT FOR DEBUGGER: ");
366  } else {
367  PrintF("**** DEOPT: ");
368  }
369  function->PrintName();
370  PrintF(" at bailout #%u, address 0x%" V8PRIxPTR ", frame size %d\n",
371  bailout_id,
372  reinterpret_cast<intptr_t>(from),
373  fp_to_sp_delta - (2 * kPointerSize));
374  } else if (FLAG_trace_osr && type == OSR) {
375  PrintF("**** OSR: ");
376  function->PrintName();
377  PrintF(" at ast id #%u, address 0x%" V8PRIxPTR ", frame size %d\n",
378  bailout_id,
379  reinterpret_cast<intptr_t>(from),
380  fp_to_sp_delta - (2 * kPointerSize));
381  }
382  function->shared()->increment_deopt_count();
383  // Find the optimized code.
384  if (type == EAGER) {
385  ASSERT(from == NULL);
386  optimized_code_ = function_->code();
387  if (FLAG_trace_deopt && FLAG_code_comments) {
388  // Print instruction associated with this bailout.
389  const char* last_comment = NULL;
390  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
391  | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
392  for (RelocIterator it(optimized_code_, mask); !it.done(); it.next()) {
393  RelocInfo* info = it.rinfo();
394  if (info->rmode() == RelocInfo::COMMENT) {
395  last_comment = reinterpret_cast<const char*>(info->data());
396  }
397  if (info->rmode() == RelocInfo::RUNTIME_ENTRY) {
398  unsigned id = Deoptimizer::GetDeoptimizationId(
399  info->target_address(), Deoptimizer::EAGER);
400  if (id == bailout_id && last_comment != NULL) {
401  PrintF(" %s\n", last_comment);
402  break;
403  }
404  }
405  }
406  }
407  } else if (type == LAZY) {
408  optimized_code_ = FindDeoptimizingCodeFromAddress(from);
409  ASSERT(optimized_code_ != NULL);
410  } else if (type == OSR) {
411  // The function has already been optimized and we're transitioning
412  // from the unoptimized shared version to the optimized one in the
413  // function. The return address (from) points to unoptimized code.
414  optimized_code_ = function_->code();
415  ASSERT(optimized_code_->kind() == Code::OPTIMIZED_FUNCTION);
416  ASSERT(!optimized_code_->contains(from));
417  } else if (type == DEBUGGER) {
418  optimized_code_ = optimized_code;
419  ASSERT(optimized_code_->contains(from));
420  }
421  ASSERT(HEAP->allow_allocation(false));
422  unsigned size = ComputeInputFrameSize();
423  input_ = new(size) FrameDescription(size, function);
424  input_->SetFrameType(StackFrame::JAVA_SCRIPT);
425 }
426 
427 
428 Deoptimizer::~Deoptimizer() {
429  ASSERT(input_ == NULL && output_ == NULL);
430 }
431 
432 
433 void Deoptimizer::DeleteFrameDescriptions() {
434  delete input_;
435  for (int i = 0; i < output_count_; ++i) {
436  if (output_[i] != input_) delete output_[i];
437  }
438  delete[] output_;
439  input_ = NULL;
440  output_ = NULL;
441  ASSERT(!HEAP->allow_allocation(true));
442 }
443 
444 
445 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
446  ASSERT(id >= 0);
447  if (id >= kNumberOfEntries) return NULL;
448  MemoryChunk* base = NULL;
449  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
450  if (type == EAGER) {
451  if (data->eager_deoptimization_entry_code_ == NULL) {
452  data->eager_deoptimization_entry_code_ = CreateCode(type);
453  }
454  base = data->eager_deoptimization_entry_code_;
455  } else {
456  if (data->lazy_deoptimization_entry_code_ == NULL) {
457  data->lazy_deoptimization_entry_code_ = CreateCode(type);
458  }
459  base = data->lazy_deoptimization_entry_code_;
460  }
461  return
462  static_cast<Address>(base->area_start()) + (id * table_entry_size_);
463 }
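// Editor's note (not part of the original listing): the entry "table" is just
// kNumberOfEntries identical stubs laid out back to back, so the mapping is
// plain arithmetic. With a hypothetical table_entry_size_ of 10 bytes, entry
// id 3 starts at base->area_start() + 30, and GetDeoptimizationId() below
// recovers the id by dividing the offset from area_start() by
// table_entry_size_.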
464 
465 
466 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
467  MemoryChunk* base = NULL;
468  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
469  if (type == EAGER) {
470  base = data->eager_deoptimization_entry_code_;
471  } else {
472  base = data->lazy_deoptimization_entry_code_;
473  }
474  if (base == NULL ||
475  addr < base->area_start() ||
476  addr >= base->area_start() +
477  (kNumberOfEntries * table_entry_size_)) {
478  return kNotDeoptimizationEntry;
479  }
480  ASSERT_EQ(0,
481  static_cast<int>(addr - base->area_start()) % table_entry_size_);
482  return static_cast<int>(addr - base->area_start()) / table_entry_size_;
483 }
484 
485 
486 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
487  unsigned id,
488  SharedFunctionInfo* shared) {
489  // TODO(kasperl): For now, we do a simple linear search for the PC
490  // offset associated with the given node id. This should probably be
491  // changed to a binary search.
492  int length = data->DeoptPoints();
493  Smi* smi_id = Smi::FromInt(id);
494  for (int i = 0; i < length; i++) {
495  if (data->AstId(i) == smi_id) {
496  return data->PcAndState(i)->value();
497  }
498  }
499  PrintF("[couldn't find pc offset for node=%u]\n", id);
500  PrintF("[method: %s]\n", *shared->DebugName()->ToCString());
501  // Print the source code if available.
502  HeapStringAllocator string_allocator;
503  StringStream stream(&string_allocator);
504  shared->SourceCodePrint(&stream, -1);
505  PrintF("[source:\n%s\n]", *stream.ToCString());
506 
507  UNREACHABLE();
508  return -1;
509 }
510 
511 
512 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
513  int length = 0;
514  DeoptimizingCodeListNode* node =
515  isolate->deoptimizer_data()->deoptimizing_code_list_;
516  while (node != NULL) {
517  length++;
518  node = node->next();
519  }
520  return length;
521 }
522 
523 
524 // We rely on this function not causing a GC. It is called from generated code
525 // without having a real stack frame in place.
526 void Deoptimizer::DoComputeOutputFrames() {
527  if (bailout_type_ == OSR) {
528  DoComputeOsrOutputFrame();
529  return;
530  }
531 
532  // Print some helpful diagnostic information.
533  int64_t start = OS::Ticks();
534  if (FLAG_trace_deopt) {
535  PrintF("[deoptimizing%s: begin 0x%08" V8PRIxPTR " ",
536  (bailout_type_ == LAZY ? " (lazy)" : ""),
537  reinterpret_cast<intptr_t>(function_));
538  function_->PrintName();
539  PrintF(" @%d]\n", bailout_id_);
540  }
541 
542  // Determine basic deoptimization information. The optimized frame is
543  // described by the input data.
544  DeoptimizationInputData* input_data =
545  DeoptimizationInputData::cast(optimized_code_->deoptimization_data());
546  unsigned node_id = input_data->AstId(bailout_id_)->value();
547  ByteArray* translations = input_data->TranslationByteArray();
548  unsigned translation_index =
549  input_data->TranslationIndex(bailout_id_)->value();
550 
551  // Do the input frame to output frame(s) translation.
552  TranslationIterator iterator(translations, translation_index);
553  Translation::Opcode opcode =
554  static_cast<Translation::Opcode>(iterator.Next());
555  ASSERT(Translation::BEGIN == opcode);
556  USE(opcode);
557  // Read the number of output frames and allocate an array for their
558  // descriptions.
559  int count = iterator.Next();
560  iterator.Next(); // Drop JS frames count.
561  ASSERT(output_ == NULL);
562  output_ = new FrameDescription*[count];
563  for (int i = 0; i < count; ++i) {
564  output_[i] = NULL;
565  }
566  output_count_ = count;
567 
568  // Translate each output frame.
569  for (int i = 0; i < count; ++i) {
570  // Read the ast node id, function, and frame height for this output frame.
571  Translation::Opcode opcode =
572  static_cast<Translation::Opcode>(iterator.Next());
573  switch (opcode) {
574  case Translation::JS_FRAME:
575  DoComputeJSFrame(&iterator, i);
576  jsframe_count_++;
577  break;
578  case Translation::ARGUMENTS_ADAPTOR_FRAME:
579  DoComputeArgumentsAdaptorFrame(&iterator, i);
580  break;
581  case Translation::CONSTRUCT_STUB_FRAME:
582  DoComputeConstructStubFrame(&iterator, i);
583  break;
584  default:
585  UNREACHABLE();
586  break;
587  }
588  }
589 
590  // Print some helpful diagnostic information.
591  if (FLAG_trace_deopt) {
592  double ms = static_cast<double>(OS::Ticks() - start) / 1000;
593  int index = output_count_ - 1; // Index of the topmost frame.
594  JSFunction* function = output_[index]->GetFunction();
595  PrintF("[deoptimizing: end 0x%08" V8PRIxPTR " ",
596  reinterpret_cast<intptr_t>(function));
597  function->PrintName();
598  PrintF(" => node=%u, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
599  " took %0.3f ms]\n",
600  node_id,
601  output_[index]->GetPc(),
602  FullCodeGenerator::State2String(
603  static_cast<FullCodeGenerator::State>(
604  output_[index]->GetState()->value())),
605  has_alignment_padding_ ? "with padding" : "no padding",
606  ms);
607  }
608 }
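// Editor's note (not part of the original listing): sketch of the translation
// stream decoded above for a function with one inlined call. Opcode operand
// counts follow Translation::NumberOfOperandsFor() later in this file; the
// concrete values are made up for illustration:
//
//   BEGIN 2 2                          ; frame count, JS frame count
//   JS_FRAME ast_id literal_id height  ; outermost (caller) frame
//     STACK_SLOT 3                     ; one command per translated slot
//     INT32_REGISTER 1
//     LITERAL 7
//   JS_FRAME ast_id literal_id height  ; inlined callee frame
//     REGISTER 0
//     DOUBLE_STACK_SLOT 4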
609 
610 
611 void Deoptimizer::MaterializeHeapNumbers() {
612  ASSERT_NE(DEBUGGER, bailout_type_);
613  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
614  HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
615  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
616  if (FLAG_trace_deopt) {
617  PrintF("Materializing a new heap number %p [%e] in slot %p\n",
618  reinterpret_cast<void*>(*num),
619  d.value(),
620  d.slot_address());
621  }
622 
623  Memory::Object_at(d.slot_address()) = *num;
624  }
625 }
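// Editor's note (not part of the original listing): sketch of the deferred
// materialization handshake this function completes. While frames are being
// built no allocation is allowed, so DoTranslateCommand() parks unboxed
// numbers on the side and writes a smi placeholder into the frame; once the
// frames exist, the values are boxed and patched in (the value 3.14 is a
// made-up example):
//
//   AddDoubleValue(slot_address, 3.14);              // remember slot + value
//   output_[i]->SetFrameSlot(offset, kPlaceholder);  // GC-safe smi 0
//   ...
//   MaterializeHeapNumbers();                        // NewNumber(3.14), patch the slot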
626 
627 
628 #ifdef ENABLE_DEBUGGER_SUPPORT
629 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
630  Address parameters_top,
631  uint32_t parameters_size,
632  Address expressions_top,
633  uint32_t expressions_size,
634  DeoptimizedFrameInfo* info) {
635  ASSERT_EQ(DEBUGGER, bailout_type_);
636  Address parameters_bottom = parameters_top + parameters_size;
637  Address expressions_bottom = expressions_top + expressions_size;
638  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
639  HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
640 
641  // Check if the heap number to materialize actually belongs to the frame
642  // being extracted.
643  Address slot = d.slot_address();
644  if (parameters_top <= slot && slot < parameters_bottom) {
645  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
646 
647  int index = (info->parameters_count() - 1) -
648  static_cast<int>(slot - parameters_top) / kPointerSize;
649 
650  if (FLAG_trace_deopt) {
651  PrintF("Materializing a new heap number %p [%e] in slot %p"
652  " for parameter slot #%d\n",
653  reinterpret_cast<void*>(*num),
654  d.value(),
655  d.slot_address(),
656  index);
657  }
658 
659  info->SetParameter(index, *num);
660  } else if (expressions_top <= slot && slot < expressions_bottom) {
661  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
662 
663  int index = info->expression_count() - 1 -
664  static_cast<int>(slot - expressions_top) / kPointerSize;
665 
666  if (FLAG_trace_deopt) {
667  PrintF("Materializing a new heap number %p [%e] in slot %p"
668  " for expression slot #%d\n",
669  reinterpret_cast<void*>(*num),
670  d.value(),
671  d.slot_address(),
672  index);
673  }
674 
675  info->SetExpression(index, *num);
676  }
677  }
678 }
679 #endif
680 
681 
682 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
683  int frame_index,
684  unsigned output_offset) {
685  disasm::NameConverter converter;
686  // A GC-safe temporary placeholder that we can put in the output frame.
687  const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
688 
689  // Ignore commands marked as duplicate and act on the first non-duplicate.
690  Translation::Opcode opcode =
691  static_cast<Translation::Opcode>(iterator->Next());
692  while (opcode == Translation::DUPLICATE) {
693  opcode = static_cast<Translation::Opcode>(iterator->Next());
694  iterator->Skip(Translation::NumberOfOperandsFor(opcode));
695  opcode = static_cast<Translation::Opcode>(iterator->Next());
696  }
697 
698  switch (opcode) {
699  case Translation::BEGIN:
700  case Translation::JS_FRAME:
701  case Translation::ARGUMENTS_ADAPTOR_FRAME:
702  case Translation::CONSTRUCT_STUB_FRAME:
703  case Translation::DUPLICATE:
704  UNREACHABLE();
705  return;
706 
707  case Translation::REGISTER: {
708  int input_reg = iterator->Next();
709  intptr_t input_value = input_->GetRegister(input_reg);
710  if (FLAG_trace_deopt) {
711  PrintF(
712  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
713  output_[frame_index]->GetTop() + output_offset,
714  output_offset,
715  input_value,
716  converter.NameOfCPURegister(input_reg));
717  reinterpret_cast<Object*>(input_value)->ShortPrint();
718  PrintF("\n");
719  }
720  output_[frame_index]->SetFrameSlot(output_offset, input_value);
721  return;
722  }
723 
724  case Translation::INT32_REGISTER: {
725  int input_reg = iterator->Next();
726  intptr_t value = input_->GetRegister(input_reg);
727  bool is_smi = Smi::IsValid(value);
728  if (FLAG_trace_deopt) {
729  PrintF(
730  " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
731  output_[frame_index]->GetTop() + output_offset,
732  output_offset,
733  value,
734  converter.NameOfCPURegister(input_reg),
735  is_smi ? "smi" : "heap number");
736  }
737  if (is_smi) {
738  intptr_t tagged_value =
739  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
740  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
741  } else {
742  // We save the untagged value on the side and store a GC-safe
743  // temporary placeholder in the frame.
744  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
745  static_cast<double>(static_cast<int32_t>(value)));
746  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
747  }
748  return;
749  }
750 
751  case Translation::DOUBLE_REGISTER: {
752  int input_reg = iterator->Next();
753  double value = input_->GetDoubleRegister(input_reg);
754  if (FLAG_trace_deopt) {
755  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
756  output_[frame_index]->GetTop() + output_offset,
757  output_offset,
758  value,
759  DoubleRegister::AllocationIndexToString(input_reg));
760  }
761  // We save the untagged value on the side and store a GC-safe
762  // temporary placeholder in the frame.
763  AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
764  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
765  return;
766  }
767 
768  case Translation::STACK_SLOT: {
769  int input_slot_index = iterator->Next();
770  unsigned input_offset =
771  input_->GetOffsetFromSlotIndex(input_slot_index);
772  intptr_t input_value = input_->GetFrameSlot(input_offset);
773  if (FLAG_trace_deopt) {
774  PrintF(" 0x%08" V8PRIxPTR ": ",
775  output_[frame_index]->GetTop() + output_offset);
776  PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
777  output_offset,
778  input_value,
779  input_offset);
780  reinterpret_cast<Object*>(input_value)->ShortPrint();
781  PrintF("\n");
782  }
783  output_[frame_index]->SetFrameSlot(output_offset, input_value);
784  return;
785  }
786 
787  case Translation::INT32_STACK_SLOT: {
788  int input_slot_index = iterator->Next();
789  unsigned input_offset =
790  input_->GetOffsetFromSlotIndex(input_slot_index);
791  intptr_t value = input_->GetFrameSlot(input_offset);
792  bool is_smi = Smi::IsValid(value);
793  if (FLAG_trace_deopt) {
794  PrintF(" 0x%08" V8PRIxPTR ": ",
795  output_[frame_index]->GetTop() + output_offset);
796  PrintF("[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
797  output_offset,
798  value,
799  input_offset,
800  is_smi ? "smi" : "heap number");
801  }
802  if (is_smi) {
803  intptr_t tagged_value =
804  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
805  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
806  } else {
807  // We save the untagged value on the side and store a GC-safe
808  // temporary placeholder in the frame.
809  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
810  static_cast<double>(static_cast<int32_t>(value)));
811  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
812  }
813  return;
814  }
815 
816  case Translation::DOUBLE_STACK_SLOT: {
817  int input_slot_index = iterator->Next();
818  unsigned input_offset =
819  input_->GetOffsetFromSlotIndex(input_slot_index);
820  double value = input_->GetDoubleFrameSlot(input_offset);
821  if (FLAG_trace_deopt) {
822  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
823  output_[frame_index]->GetTop() + output_offset,
824  output_offset,
825  value,
826  input_offset);
827  }
828  // We save the untagged value on the side and store a GC-safe
829  // temporary placeholder in the frame.
830  AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
831  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
832  return;
833  }
834 
835  case Translation::LITERAL: {
836  Object* literal = ComputeLiteral(iterator->Next());
837  if (FLAG_trace_deopt) {
838  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ",
839  output_[frame_index]->GetTop() + output_offset,
840  output_offset);
841  literal->ShortPrint();
842  PrintF(" ; literal\n");
843  }
844  intptr_t value = reinterpret_cast<intptr_t>(literal);
845  output_[frame_index]->SetFrameSlot(output_offset, value);
846  return;
847  }
848 
849  case Translation::ARGUMENTS_OBJECT: {
850  // Use the arguments marker value as a sentinel and fill in the arguments
851  // object after the deoptimized frame is built.
852  if (FLAG_trace_deopt) {
853  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ",
854  output_[frame_index]->GetTop() + output_offset,
855  output_offset);
856  isolate_->heap()->arguments_marker()->ShortPrint();
857  PrintF(" ; arguments object\n");
858  }
859  intptr_t value = reinterpret_cast<intptr_t>(
860  isolate_->heap()->arguments_marker());
861  output_[frame_index]->SetFrameSlot(output_offset, value);
862  return;
863  }
864  }
865 }
866 
867 
868 bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
869  int* input_offset) {
870  disasm::NameConverter converter;
871  FrameDescription* output = output_[0];
872 
873  // The input values are all part of the unoptimized frame so they
874  // are all tagged pointers.
875  uintptr_t input_value = input_->GetFrameSlot(*input_offset);
876  Object* input_object = reinterpret_cast<Object*>(input_value);
877 
878  Translation::Opcode opcode =
879  static_cast<Translation::Opcode>(iterator->Next());
880  bool duplicate = (opcode == Translation::DUPLICATE);
881  if (duplicate) {
882  opcode = static_cast<Translation::Opcode>(iterator->Next());
883  }
884 
885  switch (opcode) {
886  case Translation::BEGIN:
887  case Translation::JS_FRAME:
888  case Translation::ARGUMENTS_ADAPTOR_FRAME:
889  case Translation::CONSTRUCT_STUB_FRAME:
890  case Translation::DUPLICATE:
891  UNREACHABLE(); // Malformed input.
892  return false;
893 
894  case Translation::REGISTER: {
895  int output_reg = iterator->Next();
896  if (FLAG_trace_osr) {
897  PrintF(" %s <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
898  converter.NameOfCPURegister(output_reg),
899  input_value,
900  *input_offset);
901  }
902  output->SetRegister(output_reg, input_value);
903  break;
904  }
905 
906  case Translation::INT32_REGISTER: {
907  // Abort OSR if we don't have a number.
908  if (!input_object->IsNumber()) return false;
909 
910  int output_reg = iterator->Next();
911  int int32_value = input_object->IsSmi()
912  ? Smi::cast(input_object)->value()
913  : FastD2I(input_object->Number());
914  // Abort the translation if the conversion lost information.
915  if (!input_object->IsSmi() &&
916  FastI2D(int32_value) != input_object->Number()) {
917  if (FLAG_trace_osr) {
918  PrintF("**** %g could not be converted to int32 ****\n",
919  input_object->Number());
920  }
921  return false;
922  }
923  if (FLAG_trace_osr) {
924  PrintF(" %s <- %d (int32) ; [sp + %d]\n",
925  converter.NameOfCPURegister(output_reg),
926  int32_value,
927  *input_offset);
928  }
929  output->SetRegister(output_reg, int32_value);
930  break;
931  }
932 
933  case Translation::DOUBLE_REGISTER: {
934  // Abort OSR if we don't have a number.
935  if (!input_object->IsNumber()) return false;
936 
937  int output_reg = iterator->Next();
938  double double_value = input_object->Number();
939  if (FLAG_trace_osr) {
940  PrintF(" %s <- %g (double) ; [sp + %d]\n",
941  DoubleRegister::AllocationIndexToString(output_reg),
942  double_value,
943  *input_offset);
944  }
945  output->SetDoubleRegister(output_reg, double_value);
946  break;
947  }
948 
949  case Translation::STACK_SLOT: {
950  int output_index = iterator->Next();
951  unsigned output_offset =
952  output->GetOffsetFromSlotIndex(output_index);
953  if (FLAG_trace_osr) {
954  PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
955  output_offset,
956  input_value,
957  *input_offset);
958  reinterpret_cast<Object*>(input_value)->ShortPrint();
959  PrintF("\n");
960  }
961  output->SetFrameSlot(output_offset, input_value);
962  break;
963  }
964 
965  case Translation::INT32_STACK_SLOT: {
966  // Abort OSR if we don't have a number.
967  if (!input_object->IsNumber()) return false;
968 
969  int output_index = iterator->Next();
970  unsigned output_offset =
971  output->GetOffsetFromSlotIndex(output_index);
972  int int32_value = input_object->IsSmi()
973  ? Smi::cast(input_object)->value()
974  : DoubleToInt32(input_object->Number());
975  // Abort the translation if the conversion lost information.
976  if (!input_object->IsSmi() &&
977  FastI2D(int32_value) != input_object->Number()) {
978  if (FLAG_trace_osr) {
979  PrintF("**** %g could not be converted to int32 ****\n",
980  input_object->Number());
981  }
982  return false;
983  }
984  if (FLAG_trace_osr) {
985  PrintF(" [sp + %d] <- %d (int32) ; [sp + %d]\n",
986  output_offset,
987  int32_value,
988  *input_offset);
989  }
990  output->SetFrameSlot(output_offset, int32_value);
991  break;
992  }
993 
994  case Translation::DOUBLE_STACK_SLOT: {
995  static const int kLowerOffset = 0 * kPointerSize;
996  static const int kUpperOffset = 1 * kPointerSize;
997 
998  // Abort OSR if we don't have a number.
999  if (!input_object->IsNumber()) return false;
1000 
1001  int output_index = iterator->Next();
1002  unsigned output_offset =
1003  output->GetOffsetFromSlotIndex(output_index);
1004  double double_value = input_object->Number();
1005  uint64_t int_value = BitCast<uint64_t, double>(double_value);
1006  int32_t lower = static_cast<int32_t>(int_value);
1007  int32_t upper = static_cast<int32_t>(int_value >> kBitsPerInt);
1008  if (FLAG_trace_osr) {
1009  PrintF(" [sp + %d] <- 0x%08x (upper bits of %g) ; [sp + %d]\n",
1010  output_offset + kUpperOffset,
1011  upper,
1012  double_value,
1013  *input_offset);
1014  PrintF(" [sp + %d] <- 0x%08x (lower bits of %g) ; [sp + %d]\n",
1015  output_offset + kLowerOffset,
1016  lower,
1017  double_value,
1018  *input_offset);
1019  }
1020  output->SetFrameSlot(output_offset + kLowerOffset, lower);
1021  output->SetFrameSlot(output_offset + kUpperOffset, upper);
1022  break;
1023  }
1024 
1025  case Translation::LITERAL: {
1026  // Just ignore non-materialized literals.
1027  iterator->Next();
1028  break;
1029  }
1030 
1031  case Translation::ARGUMENTS_OBJECT: {
1032  // Optimized code assumes that the arguments object has not been
1033  // materialized and so bypasses it when doing arguments access.
1034  // We should have bailed out before starting the frame
1035  // translation.
1036  UNREACHABLE();
1037  return false;
1038  }
1039  }
1040 
1041  if (!duplicate) *input_offset -= kPointerSize;
1042  return true;
1043 }
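// Editor's note (not part of the original listing): worked example for the
// DOUBLE_STACK_SLOT case above. The double 1.0 has the IEEE-754 bit pattern
// 0x3FF0000000000000, so on a 32-bit target the frame receives
//   0x00000000 at output_offset + kLowerOffset and
//   0x3FF00000 at output_offset + kUpperOffset.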
1044 
1045 
1046 void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
1047  Code* check_code,
1048  Code* replacement_code) {
1049  // Iterate over the stack check table and patch every stack check
1050  // call to an unconditional call to the replacement code.
1051  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
1052  Address stack_check_cursor = unoptimized_code->instruction_start() +
1053  unoptimized_code->stack_check_table_offset();
1054  uint32_t table_length = Memory::uint32_at(stack_check_cursor);
1055  stack_check_cursor += kIntSize;
1056  for (uint32_t i = 0; i < table_length; ++i) {
1057  uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
1058  Address pc_after = unoptimized_code->instruction_start() + pc_offset;
1059  PatchStackCheckCodeAt(unoptimized_code,
1060  pc_after,
1061  check_code,
1062  replacement_code);
1063  stack_check_cursor += 2 * kIntSize;
1064  }
1065 }
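// Editor's note (not part of the original listing): the stack check table
// walked above starts with a 32-bit entry count followed by two 32-bit words
// per entry, the second of which is the code-relative pc offset being patched
// (the first word is presumably the AST id of the back edge; it is never read
// here). That is why the loop fetches
// Memory::uint32_at(stack_check_cursor + kIntSize) and then advances the
// cursor by 2 * kIntSize per entry.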
1066 
1067 
1068 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
1069  Code* check_code,
1070  Code* replacement_code) {
1071  // Iterate over the stack check table and revert the patched
1072  // stack check calls.
1073  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
1074  Address stack_check_cursor = unoptimized_code->instruction_start() +
1075  unoptimized_code->stack_check_table_offset();
1076  uint32_t table_length = Memory::uint32_at(stack_check_cursor);
1077  stack_check_cursor += kIntSize;
1078  for (uint32_t i = 0; i < table_length; ++i) {
1079  uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
1080  Address pc_after = unoptimized_code->instruction_start() + pc_offset;
1081  RevertStackCheckCodeAt(unoptimized_code,
1082  pc_after,
1083  check_code,
1084  replacement_code);
1085  stack_check_cursor += 2 * kIntSize;
1086  }
1087 }
1088 
1089 
1090 unsigned Deoptimizer::ComputeInputFrameSize() const {
1091  unsigned fixed_size = ComputeFixedSize(function_);
1092  // The fp-to-sp delta already takes the context and the function
1093  // into account so we have to avoid double counting them (-2).
1094  unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize);
1095 #ifdef DEBUG
1096  if (bailout_type_ == OSR) {
1097  // TODO(kasperl): It would be nice if we could verify that the
1098  // size matches with the stack height we can compute based on the
1099  // environment at the OSR entry. The code for that is built into
1100  // the DoComputeOsrOutputFrame function for now.
1101  } else {
1102  unsigned stack_slots = optimized_code_->stack_slots();
1103  unsigned outgoing_size = ComputeOutgoingArgumentSize();
1104  ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
1105  }
1106 #endif
1107  return result;
1108 }
1109 
1110 
1111 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
1112  // The fixed part of the frame consists of the return address, frame
1113  // pointer, function, context, and all the incoming arguments.
1114  return ComputeIncomingArgumentSize(function) +
1115  StandardFrameConstants::kFixedFrameSize;
1116 }
1117 
1118 
1119 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
1120  // The incoming arguments are the values for the formal parameters and
1121  // the receiver. Every slot contains a pointer.
1122  unsigned arguments = function->shared()->formal_parameter_count() + 1;
1123  return arguments * kPointerSize;
1124 }
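// Editor's note (not part of the original listing): worked example, assuming
// a 32-bit target (kPointerSize == 4) and a function with 2 formal
// parameters. ComputeIncomingArgumentSize() returns (2 + 1) * 4 = 12 bytes
// (parameters plus receiver), and ComputeFixedSize() adds
// StandardFrameConstants::kFixedFrameSize on top for the return address,
// frame pointer, function and context slots.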
1125 
1126 
1127 unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
1128  DeoptimizationInputData* data = DeoptimizationInputData::cast(
1129  optimized_code_->deoptimization_data());
1130  unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
1131  return height * kPointerSize;
1132 }
1133 
1134 
1135 Object* Deoptimizer::ComputeLiteral(int index) const {
1136  DeoptimizationInputData* data = DeoptimizationInputData::cast(
1137  optimized_code_->deoptimization_data());
1138  FixedArray* literals = data->LiteralArray();
1139  return literals->get(index);
1140 }
1141 
1142 
1143 void Deoptimizer::AddDoubleValue(intptr_t slot_address,
1144  double value) {
1145  HeapNumberMaterializationDescriptor value_desc(
1146  reinterpret_cast<Address>(slot_address), value);
1147  deferred_heap_numbers_.Add(value_desc);
1148 }
1149 
1150 
1151 MemoryChunk* Deoptimizer::CreateCode(BailoutType type) {
1152  // We cannot run this if the serializer is enabled because this will
1153  // cause us to emit relocation information for the external
1154  // references. This is fine because the deoptimizer's code section
1155  // isn't meant to be serialized at all.
1156  ASSERT(!Serializer::enabled());
1157 
1158  MacroAssembler masm(Isolate::Current(), NULL, 16 * KB);
1159  masm.set_emit_debug_code(false);
1160  GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type);
1161  CodeDesc desc;
1162  masm.GetCode(&desc);
1163  ASSERT(desc.reloc_size == 0);
1164 
1165  MemoryChunk* chunk =
1166  Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size,
1167  EXECUTABLE,
1168  NULL);
1169  if (chunk == NULL) {
1170  V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table");
1171  }
1172  ASSERT(chunk->area_size() >= desc.instr_size);
1173  memcpy(chunk->area_start(), desc.buffer, desc.instr_size);
1174  CPU::FlushICache(chunk->area_start(), desc.instr_size);
1175  return chunk;
1176 }
1177 
1178 
1179 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) {
1180  DeoptimizingCodeListNode* node =
1181  Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_;
1182  while (node != NULL) {
1183  if (node->code()->contains(addr)) return *node->code();
1184  node = node->next();
1185  }
1186  return NULL;
1187 }
1188 
1189 
1190 void Deoptimizer::RemoveDeoptimizingCode(Code* code) {
1191  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
1192  ASSERT(data->deoptimizing_code_list_ != NULL);
1193  // Run through the code objects to find this one and remove it.
1194  DeoptimizingCodeListNode* prev = NULL;
1195  DeoptimizingCodeListNode* current = data->deoptimizing_code_list_;
1196  while (current != NULL) {
1197  if (*current->code() == code) {
1198  // Unlink from list. If prev is NULL we are looking at the first element.
1199  if (prev == NULL) {
1200  data->deoptimizing_code_list_ = current->next();
1201  } else {
1202  prev->set_next(current->next());
1203  }
1204  delete current;
1205  return;
1206  }
1207  // Move to next in list.
1208  prev = current;
1209  current = current->next();
1210  }
1211  // Deoptimizing code is removed through a weak callback. Each object is
1212  // expected to be removed once and only once.
1213  UNREACHABLE();
1214 }
1215 
1216 
1217 FrameDescription::FrameDescription(uint32_t frame_size,
1218  JSFunction* function)
1219  : frame_size_(frame_size),
1220  function_(function),
1221  top_(kZapUint32),
1222  pc_(kZapUint32),
1223  fp_(kZapUint32),
1224  context_(kZapUint32) {
1225  // Zap all the registers.
1226  for (int r = 0; r < Register::kNumRegisters; r++) {
1227  SetRegister(r, kZapUint32);
1228  }
1229 
1230  // Zap all the slots.
1231  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
1232  SetFrameSlot(o, kZapUint32);
1233  }
1234 }
1235 
1236 
1237 int FrameDescription::ComputeFixedSize() {
1238  return StandardFrameConstants::kFixedFrameSize +
1239  (ComputeParametersCount() + 1) * kPointerSize;
1240 }
1241 
1242 
1243 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
1244  if (slot_index >= 0) {
1245  // Local or spill slots. Skip the fixed part of the frame
1246  // including all arguments.
1247  unsigned base = GetFrameSize() - ComputeFixedSize();
1248  return base - ((slot_index + 1) * kPointerSize);
1249  } else {
1250  // Incoming parameter.
1251  int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
1252  unsigned base = GetFrameSize() - arg_size;
1253  return base - ((slot_index + 1) * kPointerSize);
1254  }
1255 }
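// Editor's note (not part of the original listing): worked example for the
// mapping above, assuming a 32-bit target (kPointerSize == 4), a frame with
// GetFrameSize() == 40 and ComputeFixedSize() == 28. For spill slot index 0
// the base is 40 - 28 = 12, so the returned offset is 12 - (0 + 1) * 4 = 8.
// Incoming parameters use negative slot indexes and are measured from the
// top of the argument area instead.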
1256 
1257 
1258 int FrameDescription::ComputeParametersCount() {
1259  switch (type_) {
1260  case StackFrame::JAVA_SCRIPT:
1261  return function_->shared()->formal_parameter_count();
1262  case StackFrame::ARGUMENTS_ADAPTOR: {
1263  // The last slot contains the number of incoming arguments as a smi.
1264  // Can't use GetExpression(0) because it would cause infinite recursion.
1265  return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
1266  }
1267  default:
1268  UNREACHABLE();
1269  return 0;
1270  }
1271 }
1272 
1273 
1274 Object* FrameDescription::GetParameter(int index) {
1275  ASSERT(index >= 0);
1276  ASSERT(index < ComputeParametersCount());
1277  // The slot indexes for incoming arguments are negative.
1278  unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
1279  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
1280 }
1281 
1282 
1283 unsigned FrameDescription::GetExpressionCount() {
1284  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
1285  unsigned size = GetFrameSize() - ComputeFixedSize();
1286  return size / kPointerSize;
1287 }
1288 
1289 
1290 Object* FrameDescription::GetExpression(int index) {
1291  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
1292  unsigned offset = GetOffsetFromSlotIndex(index);
1293  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
1294 }
1295 
1296 
1297 void TranslationBuffer::Add(int32_t value, Zone* zone) {
1298  // Encode the sign bit in the least significant bit.
1299  bool is_negative = (value < 0);
1300  uint32_t bits = ((is_negative ? -value : value) << 1) |
1301  static_cast<int32_t>(is_negative);
1302  // Encode the individual bytes using the least significant bit of
1303  // each byte to indicate whether or not more bytes follow.
1304  do {
1305  uint32_t next = bits >> 7;
1306  contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
1307  bits = next;
1308  } while (bits != 0);
1309 }
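// Editor's note (not part of the original listing): worked example of the
// encoding above, which TranslationIterator::Next() below reverses. For the
// value -3, bits = (3 << 1) | 1 = 7, which fits in 7 bits, so the single byte
// (7 << 1) | 0 = 0x0E is emitted. For the value 200, bits = 400, which is
// emitted as 0x21 (low 7 bits with the "more bytes" flag set) followed by
// 0x06.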
1310 
1311 
1312 int32_t TranslationIterator::Next() {
1313  // Run through the bytes until we reach one with a least significant
1314  // bit of zero (marks the end).
1315  uint32_t bits = 0;
1316  for (int i = 0; true; i += 7) {
1317  ASSERT(HasNext());
1318  uint8_t next = buffer_->get(index_++);
1319  bits |= (next >> 1) << i;
1320  if ((next & 1) == 0) break;
1321  }
1322  // The bits encode the sign in the least significant bit.
1323  bool is_negative = (bits & 1) == 1;
1324  int32_t result = bits >> 1;
1325  return is_negative ? -result : result;
1326 }
1327 
1328 
1329 Handle<ByteArray> TranslationBuffer::CreateByteArray() {
1330  int length = contents_.length();
1331  Handle<ByteArray> result =
1332  Isolate::Current()->factory()->NewByteArray(length, TENURED);
1333  memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
1334  return result;
1335 }
1336 
1337 
1338 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
1339  buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
1340  buffer_->Add(literal_id, zone());
1341  buffer_->Add(height, zone());
1342 }
1343 
1344 
1345 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
1346  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
1347  buffer_->Add(literal_id, zone());
1348  buffer_->Add(height, zone());
1349 }
1350 
1351 
1352 void Translation::BeginJSFrame(int node_id, int literal_id, unsigned height) {
1353  buffer_->Add(JS_FRAME, zone());
1354  buffer_->Add(node_id, zone());
1355  buffer_->Add(literal_id, zone());
1356  buffer_->Add(height, zone());
1357 }
1358 
1359 
1360 void Translation::StoreRegister(Register reg) {
1361  buffer_->Add(REGISTER, zone());
1362  buffer_->Add(reg.code(), zone());
1363 }
1364 
1365 
1366 void Translation::StoreInt32Register(Register reg) {
1367  buffer_->Add(INT32_REGISTER, zone());
1368  buffer_->Add(reg.code(), zone());
1369 }
1370 
1371 
1372 void Translation::StoreDoubleRegister(DoubleRegister reg) {
1373  buffer_->Add(DOUBLE_REGISTER, zone());
1374  buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
1375 }
1376 
1377 
1378 void Translation::StoreStackSlot(int index) {
1379  buffer_->Add(STACK_SLOT, zone());
1380  buffer_->Add(index, zone());
1381 }
1382 
1383 
1384 void Translation::StoreInt32StackSlot(int index) {
1385  buffer_->Add(INT32_STACK_SLOT, zone());
1386  buffer_->Add(index, zone());
1387 }
1388 
1389 
1390 void Translation::StoreDoubleStackSlot(int index) {
1391  buffer_->Add(DOUBLE_STACK_SLOT, zone());
1392  buffer_->Add(index, zone());
1393 }
1394 
1395 
1396 void Translation::StoreLiteral(int literal_id) {
1397  buffer_->Add(LITERAL, zone());
1398  buffer_->Add(literal_id, zone());
1399 }
1400 
1401 
1402 void Translation::StoreArgumentsObject() {
1403  buffer_->Add(ARGUMENTS_OBJECT, zone());
1404 }
1405 
1406 
1407 void Translation::MarkDuplicate() {
1408  buffer_->Add(DUPLICATE, zone());
1409 }
1410 
1411 
1412 int Translation::NumberOfOperandsFor(Opcode opcode) {
1413  switch (opcode) {
1414  case ARGUMENTS_OBJECT:
1415  case DUPLICATE:
1416  return 0;
1417  case REGISTER:
1418  case INT32_REGISTER:
1419  case DOUBLE_REGISTER:
1420  case STACK_SLOT:
1421  case INT32_STACK_SLOT:
1422  case DOUBLE_STACK_SLOT:
1423  case LITERAL:
1424  return 1;
1425  case BEGIN:
1426  case ARGUMENTS_ADAPTOR_FRAME:
1427  case CONSTRUCT_STUB_FRAME:
1428  return 2;
1429  case JS_FRAME:
1430  return 3;
1431  }
1432  UNREACHABLE();
1433  return -1;
1434 }
1435 
1436 
1437 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
1438 
1439 const char* Translation::StringFor(Opcode opcode) {
1440  switch (opcode) {
1441  case BEGIN:
1442  return "BEGIN";
1443  case JS_FRAME:
1444  return "JS_FRAME";
1445  case ARGUMENTS_ADAPTOR_FRAME:
1446  return "ARGUMENTS_ADAPTOR_FRAME";
1447  case CONSTRUCT_STUB_FRAME:
1448  return "CONSTRUCT_STUB_FRAME";
1449  case REGISTER:
1450  return "REGISTER";
1451  case INT32_REGISTER:
1452  return "INT32_REGISTER";
1453  case DOUBLE_REGISTER:
1454  return "DOUBLE_REGISTER";
1455  case STACK_SLOT:
1456  return "STACK_SLOT";
1457  case INT32_STACK_SLOT:
1458  return "INT32_STACK_SLOT";
1459  case DOUBLE_STACK_SLOT:
1460  return "DOUBLE_STACK_SLOT";
1461  case LITERAL:
1462  return "LITERAL";
1463  case ARGUMENTS_OBJECT:
1464  return "ARGUMENTS_OBJECT";
1465  case DUPLICATE:
1466  return "DUPLICATE";
1467  }
1468  UNREACHABLE();
1469  return "";
1470 }
1471 
1472 #endif
1473 
1474 
1475 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
1476  GlobalHandles* global_handles = Isolate::Current()->global_handles();
1477  // Globalize the code object and make it weak.
1478  code_ = Handle<Code>::cast(global_handles->Create(code));
1479  global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
1480  this,
1481  Deoptimizer::HandleWeakDeoptimizedCode);
1482 }
1483 
1484 
1485 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
1486  GlobalHandles* global_handles = Isolate::Current()->global_handles();
1487  global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
1488 }
1489 
1490 
1491 // We can't intermix stack decoding and allocations because
1492 // deoptimization infrastructure is not GC safe.
1493 // Thus we build a temporary structure in malloced space.
1494 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
1495  DeoptimizationInputData* data,
1496  JavaScriptFrame* frame) {
1497  Translation::Opcode opcode =
1498  static_cast<Translation::Opcode>(iterator->Next());
1499 
1500  switch (opcode) {
1501  case Translation::BEGIN:
1502  case Translation::JS_FRAME:
1503  case Translation::ARGUMENTS_ADAPTOR_FRAME:
1504  case Translation::CONSTRUCT_STUB_FRAME:
1505  // Peeled off before getting here.
1506  break;
1507 
1508  case Translation::ARGUMENTS_OBJECT:
1509  // This can be only emitted for local slots not for argument slots.
1510  break;
1511 
1512  case Translation::REGISTER:
1513  case Translation::INT32_REGISTER:
1514  case Translation::DOUBLE_REGISTER:
1515  case Translation::DUPLICATE:
1516  // We are at a safepoint which corresponds to a call. All registers are
1517  // saved by the caller so there would be no live registers at this
1518  // point. Thus these translation commands should not be used.
1519  break;
1520 
1521  case Translation::STACK_SLOT: {
1522  int slot_index = iterator->Next();
1523  Address slot_addr = SlotAddress(frame, slot_index);
1524  return SlotRef(slot_addr, SlotRef::TAGGED);
1525  }
1526 
1527  case Translation::INT32_STACK_SLOT: {
1528  int slot_index = iterator->Next();
1529  Address slot_addr = SlotAddress(frame, slot_index);
1530  return SlotRef(slot_addr, SlotRef::INT32);
1531  }
1532 
1533  case Translation::DOUBLE_STACK_SLOT: {
1534  int slot_index = iterator->Next();
1535  Address slot_addr = SlotAddress(frame, slot_index);
1536  return SlotRef(slot_addr, SlotRef::DOUBLE);
1537  }
1538 
1539  case Translation::LITERAL: {
1540  int literal_index = iterator->Next();
1541  return SlotRef(data->LiteralArray()->get(literal_index));
1542  }
1543  }
1544 
1545  UNREACHABLE();
1546  return SlotRef();
1547 }
1548 
1549 
1550 void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots,
1551  TranslationIterator* it,
1552  DeoptimizationInputData* data,
1553  JavaScriptFrame* frame) {
1554  // Process the translation commands for the arguments.
1555 
1556  // Skip the translation command for the receiver.
1557  it->Skip(Translation::NumberOfOperandsFor(
1558  static_cast<Translation::Opcode>(it->Next())));
1559 
1560  // Compute slots for arguments.
1561  for (int i = 0; i < args_slots->length(); ++i) {
1562  (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame);
1563  }
1564 }
1565 
1566 
1567 Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
1568  JavaScriptFrame* frame,
1569  int inlined_jsframe_index,
1570  int formal_parameter_count) {
1571  AssertNoAllocation no_gc;
1572  int deopt_index = AstNode::kNoNumber;
1573  DeoptimizationInputData* data =
1574  static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
1575  TranslationIterator it(data->TranslationByteArray(),
1576  data->TranslationIndex(deopt_index)->value());
1577  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1578  ASSERT(opcode == Translation::BEGIN);
1579  it.Next(); // Drop frame count.
1580  int jsframe_count = it.Next();
1581  USE(jsframe_count);
1582  ASSERT(jsframe_count > inlined_jsframe_index);
1583  int jsframes_to_skip = inlined_jsframe_index;
1584  while (true) {
1585  opcode = static_cast<Translation::Opcode>(it.Next());
1586  if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
1587  if (jsframes_to_skip == 0) {
1588  ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);
1589 
1590  it.Skip(1); // literal id
1591  int height = it.Next();
1592 
1593  // We reached the arguments adaptor frame corresponding to the
1594  // inlined function in question. Number of arguments is height - 1.
1595  Vector<SlotRef> args_slots =
1596  Vector<SlotRef>::New(height - 1); // Minus receiver.
1597  ComputeSlotsForArguments(&args_slots, &it, data, frame);
1598  return args_slots;
1599  }
1600  } else if (opcode == Translation::JS_FRAME) {
1601  if (jsframes_to_skip == 0) {
1602  // Skip over operands to advance to the next opcode.
1603  it.Skip(Translation::NumberOfOperandsFor(opcode));
1604 
1605  // We reached the frame corresponding to the inlined function
1606  // in question. Process the translation commands for the
1607  // arguments. The number of arguments is equal to the formal
1608  // parameter count.
1609  Vector<SlotRef> args_slots =
1610  Vector<SlotRef>::New(formal_parameter_count);
1611  ComputeSlotsForArguments(&args_slots, &it, data, frame);
1612  return args_slots;
1613  }
1614  jsframes_to_skip--;
1615  }
1616 
1617  // Skip over operands to advance to the next opcode.
1618  it.Skip(Translation::NumberOfOperandsFor(opcode));
1619  }
1620 
1621  UNREACHABLE();
1622  return Vector<SlotRef>();
1623 }
1624 
1625 #ifdef ENABLE_DEBUGGER_SUPPORT
1626 
1627 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
1628  int frame_index,
1629  bool has_arguments_adaptor,
1630  bool has_construct_stub) {
1631  FrameDescription* output_frame = deoptimizer->output_[frame_index];
1632  function_ = output_frame->GetFunction();
1633  has_construct_stub_ = has_construct_stub;
1634  expression_count_ = output_frame->GetExpressionCount();
1635  expression_stack_ = new Object*[expression_count_];
1636  // Get the source position using the unoptimized code.
1637  Address pc = reinterpret_cast<Address>(output_frame->GetPc());
1638  Code* code = Code::cast(Isolate::Current()->heap()->FindCodeObject(pc));
1639  source_position_ = code->SourcePosition(pc);
1640 
1641  for (int i = 0; i < expression_count_; i++) {
1642  SetExpression(i, output_frame->GetExpression(i));
1643  }
1644 
1645  if (has_arguments_adaptor) {
1646  output_frame = deoptimizer->output_[frame_index - 1];
1647  ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
1648  }
1649 
1650  parameters_count_ = output_frame->ComputeParametersCount();
1651  parameters_ = new Object*[parameters_count_];
1652  for (int i = 0; i < parameters_count_; i++) {
1653  SetParameter(i, output_frame->GetParameter(i));
1654  }
1655 }
1656 
1657 
1658 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
1659  delete[] expression_stack_;
1660  delete[] parameters_;
1661 }
1662 
1663 
1664 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
1665  v->VisitPointer(BitCast<Object**>(&function_));
1666  v->VisitPointers(parameters_, parameters_ + parameters_count_);
1667  v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
1668 }
1669 
1670 #endif // ENABLE_DEBUGGER_SUPPORT
1671 
1672 } } // namespace v8::internal
Definition: flags.cc:274
friend class FrameDescription
Definition: deoptimizer.h:344
StackFrame::Type GetFrameType() const
Definition: deoptimizer.h:440
void USE(T)
Definition: globals.h:303
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
double GetDoubleFrameSlot(unsigned offset)
Definition: deoptimizer.h:382
FrameDescription(uint32_t frame_size, JSFunction *function)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor *visitor)
Definition: deoptimizer.cc:305
double FastI2D(int x)
Definition: conversions.h:73
Object * get(int index)
Definition: objects-inl.h:1675
static uint32_t & uint32_at(Address addr)
Definition: v8memory.h:47
#define RUNTIME_ENTRY(name, nargs, ressize)
static void VisitAllOptimizedFunctionsForGlobalObject(JSObject *object, OptimizedFunctionVisitor *visitor)
Definition: deoptimizer.cc:289
int ConvertJSFrameIndexToFrameIndex(int jsframe_index)
Definition: deoptimizer.cc:108
static void FlushICache(void *start, size_t size)
void MakeWeak(Object **location, void *parameter, WeakReferenceCallback callback)
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:403
static GlobalObject * cast(Object *obj)
SmartArrayPointer< const char > ToCString() const
int jsframe_count() const
Definition: deoptimizer.h:123
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
static int GetDeoptimizationId(Address addr, BailoutType type)
Definition: deoptimizer.cc:466
static JSObject * cast(Object *obj)
static void PatchStackCheckCodeAt(Code *unoptimized_code, Address pc_after, Code *check_code, Code *replacement_code)
FlagType type() const
Definition: flags.cc:1358
int FastD2I(double x)
Definition: conversions.h:64
static Deoptimizer * New(JSFunction *function, BailoutType type, unsigned bailout_id, Address from, int fp_to_sp_delta, Isolate *isolate)
Definition: deoptimizer.cc:78
static DeoptimizationInputData * cast(Object *obj)
Object * GetExpression(int index)
Object * GetParameter(int index)
static JSFunction * cast(Object *obj)