v8  3.25.30(node0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
deoptimizer.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_DEOPTIMIZER_H_
29 #define V8_DEOPTIMIZER_H_
30 
31 #include "v8.h"
32 
33 #include "allocation.h"
34 #include "macro-assembler.h"
35 #include "zone-inl.h"
36 
37 
38 namespace v8 {
39 namespace internal {
40 
41 
42 static inline double read_double_value(Address p) {
43 #ifdef V8_HOST_CAN_READ_UNALIGNED
44  return Memory::double_at(p);
45 #else // V8_HOST_CAN_READ_UNALIGNED
46  // Prevent gcc from using load-double (mips ldc1) on (possibly)
47  // non-64-bit aligned address.
48  union conversion {
49  double d;
50  uint32_t u[2];
51  } c;
52  c.u[0] = *reinterpret_cast<uint32_t*>(p);
53  c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
54  return c.d;
55 #endif // V8_HOST_CAN_READ_UNALIGNED
56 }
57 
58 
59 class FrameDescription;
60 class TranslationIterator;
61 class DeoptimizedFrameInfo;
62 
63 template<typename T>
64 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
65  public:
66  HeapNumberMaterializationDescriptor(T destination, double value)
67  : destination_(destination), value_(value) { }
68 
69  T destination() const { return destination_; }
70  double value() const { return value_; }
71 
72  private:
73  T destination_;
74  double value_;
75 };
76 
77 
78 class ObjectMaterializationDescriptor BASE_EMBEDDED {
79  public:
81  Address slot_address, int frame, int length, int duplicate, bool is_args)
82  : slot_address_(slot_address),
83  jsframe_index_(frame),
84  object_length_(length),
85  duplicate_object_(duplicate),
86  is_arguments_(is_args) { }
87 
88  Address slot_address() const { return slot_address_; }
89  int jsframe_index() const { return jsframe_index_; }
90  int object_length() const { return object_length_; }
91  int duplicate_object() const { return duplicate_object_; }
92  bool is_arguments() const { return is_arguments_; }
93 
94  // Only used for allocated receivers in DoComputeConstructStubFrame.
95  void patch_slot_address(intptr_t slot) {
96  slot_address_ = reinterpret_cast<Address>(slot);
97  }
98 
99  private:
100  Address slot_address_;
101  int jsframe_index_;
102  int object_length_;
103  int duplicate_object_;
104  bool is_arguments_;
105 };
106 
107 
108 class OptimizedFunctionVisitor BASE_EMBEDDED {
109  public:
111 
112  // Function which is called before iteration of any optimized functions
113  // from given native context.
114  virtual void EnterContext(Context* context) = 0;
115 
116  virtual void VisitFunction(JSFunction* function) = 0;
117 
118  // Function which is called after iteration of all optimized functions
119  // from given native context.
120  virtual void LeaveContext(Context* context) = 0;
121 };
122 
123 
124 class Deoptimizer : public Malloced {
125  public:
126  enum BailoutType {
130  // This last bailout type is not really a bailout, but used by the
131  // debugger to deoptimize stack frames to allow inspection.
133  };
134 
135  static const int kBailoutTypesWithCodeEntry = SOFT + 1;
136 
137  struct JumpTableEntry : public ZoneObject {
138  inline JumpTableEntry(Address entry,
140  bool frame)
141  : label(),
142  address(entry),
143  bailout_type(type),
144  needs_frame(frame) { }
145  Label label;
149  };
150 
151  static bool TraceEnabledFor(BailoutType deopt_type,
152  StackFrame::Type frame_type);
153  static const char* MessageFor(BailoutType type);
154 
155  int output_count() const { return output_count_; }
156 
157  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
158  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
159  BailoutType bailout_type() const { return bailout_type_; }
160 
161  // Number of created JS frames. Not all created frames are necessarily JS.
162  int jsframe_count() const { return jsframe_count_; }
163 
164  static Deoptimizer* New(JSFunction* function,
165  BailoutType type,
166  unsigned bailout_id,
167  Address from,
168  int fp_to_sp_delta,
169  Isolate* isolate);
170  static Deoptimizer* Grab(Isolate* isolate);
171 
172 #ifdef ENABLE_DEBUGGER_SUPPORT
173  // The returned object with information on the optimized frame needs to be
174  // freed before another one can be generated.
175  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
176  int jsframe_index,
177  Isolate* isolate);
178  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
179  Isolate* isolate);
180 #endif
181 
182  // Makes sure that there is enough room in the relocation
183  // information of a code object to perform lazy deoptimization
184  // patching. If there is not enough room a new relocation
185  // information object is allocated and comments are added until it
186  // is big enough.
188 
189  // Deoptimize the function now. Its current optimized code will never be run
190  // again and any activations of the optimized code will get deoptimized when
191  // execution returns.
192  static void DeoptimizeFunction(JSFunction* function);
193 
194  // Deoptimize all code in the given isolate.
195  static void DeoptimizeAll(Isolate* isolate);
196 
197  // Deoptimize code associated with the given global object.
198  static void DeoptimizeGlobalObject(JSObject* object);
199 
200  // Deoptimizes all optimized code that has been previously marked
201  // (via code->set_marked_for_deoptimization) and unlinks all functions that
202  // refer to that code.
203  static void DeoptimizeMarkedCode(Isolate* isolate);
204 
205  // Visit all the known optimized functions in a given isolate.
206  static void VisitAllOptimizedFunctions(
207  Isolate* isolate, OptimizedFunctionVisitor* visitor);
208 
209  // The size in bytes of the code required at a lazy deopt patch site.
210  static int patch_size();
211 
212  ~Deoptimizer();
213 
214  void MaterializeHeapObjects(JavaScriptFrameIterator* it);
215 #ifdef ENABLE_DEBUGGER_SUPPORT
216  void MaterializeHeapNumbersForDebuggerInspectableFrame(
217  Address parameters_top,
218  uint32_t parameters_size,
219  Address expressions_top,
220  uint32_t expressions_size,
222 #endif
223 
224  static void ComputeOutputFrames(Deoptimizer* deoptimizer);
225 
226 
230  };
231 
232 
234  Isolate* isolate,
235  int id,
236  BailoutType type,
238  static int GetDeoptimizationId(Isolate* isolate,
239  Address addr,
240  BailoutType type);
241  static int GetOutputInfo(DeoptimizationOutputData* data,
242  BailoutId node_id,
243  SharedFunctionInfo* shared);
244 
245  // Code generation support.
246  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
247  static int output_count_offset() {
248  return OFFSET_OF(Deoptimizer, output_count_);
249  }
250  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
251 
253  return OFFSET_OF(Deoptimizer, has_alignment_padding_);
254  }
255 
257 
258  static const int kNotDeoptimizationEntry = -1;
259 
260  // Generators for the deoptimization entry code.
261  class EntryGenerator BASE_EMBEDDED {
262  public:
264  : masm_(masm), type_(type) { }
265  virtual ~EntryGenerator() { }
266 
267  void Generate();
268 
269  protected:
270  MacroAssembler* masm() const { return masm_; }
271  BailoutType type() const { return type_; }
272  Isolate* isolate() const { return masm_->isolate(); }
273 
274  virtual void GeneratePrologue() { }
275 
276  private:
277  MacroAssembler* masm_;
279  };
280 
281  class TableEntryGenerator : public EntryGenerator {
282  public:
284  : EntryGenerator(masm, type), count_(count) { }
285 
286  protected:
287  virtual void GeneratePrologue();
288 
289  private:
290  int count() const { return count_; }
291 
292  int count_;
293  };
294 
295  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
296 
297  static size_t GetMaxDeoptTableSize();
298 
300  BailoutType type,
301  int max_entry_id);
302 
303  Isolate* isolate() const { return isolate_; }
304 
305  private:
306  static const int kMinNumberOfEntries = 64;
307  static const int kMaxNumberOfEntries = 16384;
308 
310  JSFunction* function,
311  BailoutType type,
312  unsigned bailout_id,
313  Address from,
314  int fp_to_sp_delta,
315  Code* optimized_code);
316  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
317  void PrintFunctionName();
318  void DeleteFrameDescriptions();
319 
320  void DoComputeOutputFrames();
321  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
322  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
323  int frame_index);
324  void DoComputeConstructStubFrame(TranslationIterator* iterator,
325  int frame_index);
326  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
327  int frame_index,
328  bool is_setter_stub_frame);
329  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
330  int frame_index);
331 
332  void DoTranslateObject(TranslationIterator* iterator,
333  int object_index,
334  int field_index);
335 
336  void DoTranslateCommand(TranslationIterator* iterator,
337  int frame_index,
338  unsigned output_offset);
339 
340  unsigned ComputeInputFrameSize() const;
341  unsigned ComputeFixedSize(JSFunction* function) const;
342 
343  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
344  unsigned ComputeOutgoingArgumentSize() const;
345 
346  Object* ComputeLiteral(int index) const;
347 
348  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
349  void AddObjectDuplication(intptr_t slot, int object_index);
350  void AddObjectTaggedValue(intptr_t value);
351  void AddObjectDoubleValue(double value);
352  void AddDoubleValue(intptr_t slot_address, double value);
353 
354  bool ArgumentsObjectIsAdapted(int object_index) {
355  ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
356  int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
357  return jsframe_has_adapted_arguments_[reverse_jsframe_index];
358  }
359 
360  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
361  ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
362  int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
363  return jsframe_functions_[reverse_jsframe_index];
364  }
365 
366  // Helper function for heap object materialization.
367  Handle<Object> MaterializeNextHeapObject();
368  Handle<Object> MaterializeNextValue();
369 
370  static void GenerateDeoptimizationEntries(
371  MacroAssembler* masm, int count, BailoutType type);
372 
373  // Marks all the code in the given context for deoptimization.
374  static void MarkAllCodeForContext(Context* native_context);
375 
376  // Visit all the known optimized functions in a given context.
377  static void VisitAllOptimizedFunctionsForContext(
378  Context* context, OptimizedFunctionVisitor* visitor);
379 
380  // Deoptimizes all code marked in the given context.
381  static void DeoptimizeMarkedCodeForContext(Context* native_context);
382 
383  // Patch the given code so that it will deoptimize itself.
384  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
385 
386  // Searches the list of known deoptimizing code for a Code object
387  // containing the given address (which is supposedly faster than
388  // searching all code objects).
389  Code* FindDeoptimizingCode(Address addr);
390 
  // Fill the input from a JavaScript frame. This is used when
392  // the debugger needs to inspect an optimized frame. For normal
393  // deoptimizations the input frame is filled in generated code.
394  void FillInputFrame(Address tos, JavaScriptFrame* frame);
395 
396  // Fill the given output frame's registers to contain the failure handler
397  // address and the number of parameters for a stub failure trampoline.
398  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
399  CodeStubInterfaceDescriptor* desc);
400 
401  // Fill the given output frame's double registers with the original values
402  // from the input frame's double registers.
403  void CopyDoubleRegisters(FrameDescription* output_frame);
404 
405  // Determines whether the input frame contains alignment padding by looking
406  // at the dynamic alignment state slot inside the frame.
407  bool HasAlignmentPadding(JSFunction* function);
408 
409  // Select the version of NotifyStubFailure builtin that either saves or
410  // doesn't save the double registers depending on CPU features.
411  Code* NotifyStubFailureBuiltin();
412 
413  Isolate* isolate_;
414  JSFunction* function_;
415  Code* compiled_code_;
416  unsigned bailout_id_;
417  BailoutType bailout_type_;
418  Address from_;
419  int fp_to_sp_delta_;
420  int has_alignment_padding_;
421 
422  // Input frame description.
423  FrameDescription* input_;
424  // Number of output frames.
425  int output_count_;
426  // Number of output js frames.
427  int jsframe_count_;
428  // Array of output frame descriptions.
429  FrameDescription** output_;
430 
431  // Deferred values to be materialized.
432  List<Object*> deferred_objects_tagged_values_;
433  List<HeapNumberMaterializationDescriptor<int> >
434  deferred_objects_double_values_;
435  List<ObjectMaterializationDescriptor> deferred_objects_;
436  List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
437 
438  // Key for lookup of previously materialized objects
439  Address stack_fp_;
440  Handle<FixedArray> previously_materialized_objects_;
441  int prev_materialized_count_;
442 
443  // Output frame information. Only used during heap object materialization.
444  List<Handle<JSFunction> > jsframe_functions_;
445  List<bool> jsframe_has_adapted_arguments_;
446 
447  // Materialized objects. Only used during heap object materialization.
448  List<Handle<Object> >* materialized_values_;
449  List<Handle<Object> >* materialized_objects_;
450  int materialization_value_index_;
451  int materialization_object_index_;
452 
453 #ifdef DEBUG
454  DisallowHeapAllocation* disallow_heap_allocation_;
455 #endif // DEBUG
456 
457  CodeTracer::Scope* trace_scope_;
458 
459  static const int table_entry_size_;
460 
461  friend class FrameDescription;
462  friend class DeoptimizedFrameInfo;
463 };
464 
465 
467  public:
468  FrameDescription(uint32_t frame_size,
469  JSFunction* function);
470 
  // Variable-length-object allocator: over-allocates so that frame_size
  // bytes of frame content fit after the fixed members (the trailing
  // frame_content_[1] array is extended in place).
  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    // NOTE(review): the malloc result is not checked for NULL here —
    // presumably callers rely on allocation succeeding; confirm.
    return malloc(size + frame_size - kPointerSize);
  }
476 
477  void operator delete(void* pointer, uint32_t frame_size) {
478  free(pointer);
479  }
480 
481  void operator delete(void* description) {
482  free(description);
483  }
484 
  // Returns the frame size in bytes, asserting it fits in 32 bits
  // (frame_size_ is declared uintptr_t only for member alignment).
  uint32_t GetFrameSize() const {
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }
489 
490  JSFunction* GetFunction() const { return function_; }
491 
492  unsigned GetOffsetFromSlotIndex(int slot_index);
493 
494  intptr_t GetFrameSlot(unsigned offset) {
495  return *GetFrameSlotPointer(offset);
496  }
497 
498  double GetDoubleFrameSlot(unsigned offset) {
499  intptr_t* ptr = GetFrameSlotPointer(offset);
500  return read_double_value(reinterpret_cast<Address>(ptr));
501  }
502 
503  void SetFrameSlot(unsigned offset, intptr_t value) {
504  *GetFrameSlotPointer(offset) = value;
505  }
506 
507  void SetCallerPc(unsigned offset, intptr_t value);
508 
509  void SetCallerFp(unsigned offset, intptr_t value);
510 
511  void SetCallerConstantPool(unsigned offset, intptr_t value);
512 
  // Returns the value of general-purpose register |n| captured at the
  // deopt point. In debug builds an out-of-range |n| asserts and yields 0.
  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted ASSERT is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain ASSERT.
    if (n >= ARRAY_SIZE(registers_)) {
      ASSERT(false);
      return 0;
    }
#endif
    return registers_[n];
  }
525 
526  double GetDoubleRegister(unsigned n) const {
527  ASSERT(n < ARRAY_SIZE(double_registers_));
528  return double_registers_[n];
529  }
530 
531  void SetRegister(unsigned n, intptr_t value) {
532  ASSERT(n < ARRAY_SIZE(registers_));
533  registers_[n] = value;
534  }
535 
536  void SetDoubleRegister(unsigned n, double value) {
537  ASSERT(n < ARRAY_SIZE(double_registers_));
538  double_registers_[n] = value;
539  }
540 
541  intptr_t GetTop() const { return top_; }
542  void SetTop(intptr_t top) { top_ = top; }
543 
544  intptr_t GetPc() const { return pc_; }
545  void SetPc(intptr_t pc) { pc_ = pc; }
546 
547  intptr_t GetFp() const { return fp_; }
548  void SetFp(intptr_t fp) { fp_ = fp; }
549 
550  intptr_t GetContext() const { return context_; }
551  void SetContext(intptr_t context) { context_ = context; }
552 
553  intptr_t GetConstantPool() const { return constant_pool_; }
554  void SetConstantPool(intptr_t constant_pool) {
555  constant_pool_ = constant_pool;
556  }
557 
558  Smi* GetState() const { return state_; }
559  void SetState(Smi* state) { state_ = state; }
560 
561  void SetContinuation(intptr_t pc) { continuation_ = pc; }
562 
563  StackFrame::Type GetFrameType() const { return type_; }
564  void SetFrameType(StackFrame::Type type) { type_ = type; }
565 
566  // Get the incoming arguments count.
568 
569  // Get a parameter value for an unoptimized frame.
570  Object* GetParameter(int index);
571 
572  // Get the expression stack height for a unoptimized frame.
573  unsigned GetExpressionCount();
574 
575  // Get the expression stack value for an unoptimized frame.
576  Object* GetExpression(int index);
577 
578  static int registers_offset() {
579  return OFFSET_OF(FrameDescription, registers_);
580  }
581 
582  static int double_registers_offset() {
583  return OFFSET_OF(FrameDescription, double_registers_);
584  }
585 
586  static int frame_size_offset() {
587  return OFFSET_OF(FrameDescription, frame_size_);
588  }
589 
590  static int pc_offset() {
591  return OFFSET_OF(FrameDescription, pc_);
592  }
593 
594  static int state_offset() {
595  return OFFSET_OF(FrameDescription, state_);
596  }
597 
598  static int continuation_offset() {
599  return OFFSET_OF(FrameDescription, continuation_);
600  }
601 
602  static int frame_content_offset() {
603  return OFFSET_OF(FrameDescription, frame_content_);
604  }
605 
606  private:
607  static const uint32_t kZapUint32 = 0xbeeddead;
608 
609  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
610  // keep the variable-size array frame_content_ of type intptr_t at
611  // the end of the structure aligned.
612  uintptr_t frame_size_; // Number of bytes.
613  JSFunction* function_;
614  intptr_t registers_[Register::kNumRegisters];
615  double double_registers_[DoubleRegister::kMaxNumRegisters];
616  intptr_t top_;
617  intptr_t pc_;
618  intptr_t fp_;
619  intptr_t context_;
620  intptr_t constant_pool_;
621  StackFrame::Type type_;
622  Smi* state_;
623 
624  // Continuation is the PC where the execution continues after
625  // deoptimizing.
626  intptr_t continuation_;
627 
  // This must be at the end of the object, as the object is allocated
  // larger than its definition indicates in order to extend this array.
630  intptr_t frame_content_[1];
631 
  // Returns a pointer into the trailing frame_content_ area at byte
  // |offset|; computed from |this| because the array is variable-length.
  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
637 
638  int ComputeFixedSize();
639 };
640 
641 
643  public:
644  explicit DeoptimizerData(MemoryAllocator* allocator);
646 
647 #ifdef ENABLE_DEBUGGER_SUPPORT
648  void Iterate(ObjectVisitor* v);
649 #endif
650 
651  private:
652  MemoryAllocator* allocator_;
653  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
655 
656 #ifdef ENABLE_DEBUGGER_SUPPORT
657  DeoptimizedFrameInfo* deoptimized_frame_info_;
658 #endif
659 
660  Deoptimizer* current_;
661 
662  friend class Deoptimizer;
663 
665 };
666 
667 
668 class TranslationBuffer BASE_EMBEDDED {
669  public:
670  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
671 
672  int CurrentIndex() const { return contents_.length(); }
673  void Add(int32_t value, Zone* zone);
674 
675  Handle<ByteArray> CreateByteArray(Factory* factory);
676 
677  private:
678  ZoneList<uint8_t> contents_;
679 };
680 
681 
682 class TranslationIterator BASE_EMBEDDED {
683  public:
684  TranslationIterator(ByteArray* buffer, int index)
685  : buffer_(buffer), index_(index) {
686  ASSERT(index >= 0 && index < buffer->length());
687  }
688 
689  int32_t Next();
690 
691  bool HasNext() const { return index_ < buffer_->length(); }
692 
693  void Skip(int n) {
694  for (int i = 0; i < n; i++) Next();
695  }
696 
697  private:
698  ByteArray* buffer_;
699  int index_;
700 };
701 
702 
703 #define TRANSLATION_OPCODE_LIST(V) \
704  V(BEGIN) \
705  V(JS_FRAME) \
706  V(CONSTRUCT_STUB_FRAME) \
707  V(GETTER_STUB_FRAME) \
708  V(SETTER_STUB_FRAME) \
709  V(ARGUMENTS_ADAPTOR_FRAME) \
710  V(COMPILED_STUB_FRAME) \
711  V(DUPLICATED_OBJECT) \
712  V(ARGUMENTS_OBJECT) \
713  V(CAPTURED_OBJECT) \
714  V(REGISTER) \
715  V(INT32_REGISTER) \
716  V(UINT32_REGISTER) \
717  V(DOUBLE_REGISTER) \
718  V(STACK_SLOT) \
719  V(INT32_STACK_SLOT) \
720  V(UINT32_STACK_SLOT) \
721  V(DOUBLE_STACK_SLOT) \
722  V(LITERAL)
723 
724 
725 class Translation BASE_EMBEDDED {
726  public:
727 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
728  enum Opcode {
730  LAST = LITERAL
731  };
732 #undef DECLARE_TRANSLATION_OPCODE_ENUM
733 
734  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
735  Zone* zone)
736  : buffer_(buffer),
737  index_(buffer->CurrentIndex()),
738  zone_(zone) {
739  buffer_->Add(BEGIN, zone);
740  buffer_->Add(frame_count, zone);
741  buffer_->Add(jsframe_count, zone);
742  }
743 
744  int index() const { return index_; }
745 
746  // Commands.
747  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
748  void BeginCompiledStubFrame();
749  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
750  void BeginConstructStubFrame(int literal_id, unsigned height);
751  void BeginGetterStubFrame(int literal_id);
752  void BeginSetterStubFrame(int literal_id);
753  void BeginArgumentsObject(int args_length);
754  void BeginCapturedObject(int length);
755  void DuplicateObject(int object_index);
756  void StoreRegister(Register reg);
757  void StoreInt32Register(Register reg);
758  void StoreUint32Register(Register reg);
759  void StoreDoubleRegister(DoubleRegister reg);
760  void StoreStackSlot(int index);
761  void StoreInt32StackSlot(int index);
762  void StoreUint32StackSlot(int index);
763  void StoreDoubleStackSlot(int index);
764  void StoreLiteral(int literal_id);
765  void StoreArgumentsObject(bool args_known, int args_index, int args_length);
766 
767  Zone* zone() const { return zone_; }
768 
769  static int NumberOfOperandsFor(Opcode opcode);
770 
771 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
772  static const char* StringFor(Opcode opcode);
773 #endif
774 
775  // A literal id which refers to the JSFunction itself.
776  static const int kSelfLiteralId = -239;
777 
778  private:
779  TranslationBuffer* buffer_;
780  int index_;
781  Zone* zone_;
782 };
783 
784 
785 class SlotRef BASE_EMBEDDED {
786  public:
794  DEFERRED_OBJECT, // Object captured by the escape analysis.
795  // The number of nested objects can be obtained
796  // with the DeferredObjectLength() method
797  // (the SlotRefs of the nested objects follow
798  // this SlotRef in the depth-first order.)
799  DUPLICATE_OBJECT, // Duplicated object of a deferred object.
800  ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
801  // in sync, it should not be materialized.
802  };
803 
805  : addr_(NULL), representation_(UNKNOWN) { }
806 
807  SlotRef(Address addr, SlotRepresentation representation)
808  : addr_(addr), representation_(representation) { }
809 
810  SlotRef(Isolate* isolate, Object* literal)
811  : literal_(literal, isolate), representation_(LITERAL) { }
812 
813  static SlotRef NewArgumentsObject(int length) {
814  SlotRef slot;
815  slot.representation_ = ARGUMENTS_OBJECT;
816  slot.deferred_object_length_ = length;
817  return slot;
818  }
819 
820  static SlotRef NewDeferredObject(int length) {
821  SlotRef slot;
822  slot.representation_ = DEFERRED_OBJECT;
823  slot.deferred_object_length_ = length;
824  return slot;
825  }
826 
827  SlotRepresentation Representation() { return representation_; }
828 
829  static SlotRef NewDuplicateObject(int id) {
830  SlotRef slot;
831  slot.representation_ = DUPLICATE_OBJECT;
832  slot.duplicate_object_id_ = id;
833  return slot;
834  }
835 
837  if (representation_ == DEFERRED_OBJECT ||
838  representation_ == ARGUMENTS_OBJECT) {
839  return deferred_object_length_;
840  } else {
841  return 0;
842  }
843  }
844 
845  int DuplicateObjectId() { return duplicate_object_id_; }
846 
847  Handle<Object> GetValue(Isolate* isolate);
848 
849  private:
850  Address addr_;
851  Handle<Object> literal_;
852  SlotRepresentation representation_;
853  int deferred_object_length_;
854  int duplicate_object_id_;
855 };
856 
857 class SlotRefValueBuilder BASE_EMBEDDED {
858  public:
859  SlotRefValueBuilder(
860  JavaScriptFrame* frame,
861  int inlined_frame_index,
863 
864  void Prepare(Isolate* isolate);
865  Handle<Object> GetNext(Isolate* isolate, int level);
866  void Finish(Isolate* isolate);
867 
868  int args_length() { return args_length_; }
869 
870  private:
871  List<Handle<Object> > materialized_objects_;
872  Handle<FixedArray> previously_materialized_objects_;
873  int prev_materialized_count_;
874  Address stack_frame_id_;
875  List<SlotRef> slot_refs_;
876  int current_slot_;
877  int args_length_;
878  int first_slot_index_;
879 
880  static SlotRef ComputeSlotForNextArgument(
881  Translation::Opcode opcode,
882  TranslationIterator* iterator,
884  JavaScriptFrame* frame);
885 
886  Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
887 
888  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
889  if (slot_index >= 0) {
890  const int offset = JavaScriptFrameConstants::kLocal0Offset;
891  return frame->fp() + offset - (slot_index * kPointerSize);
892  } else {
894  return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
895  }
896  }
897 
898  Handle<Object> GetDeferredObject(Isolate* isolate);
899 };
900 
902  public:
903  explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
904  }
905 
907  void Set(Address fp, Handle<FixedArray> materialized_objects);
908  void Remove(Address fp);
909 
910  private:
911  Isolate* isolate() { return isolate_; }
912  Handle<FixedArray> GetStackEntries();
913  Handle<FixedArray> EnsureStackEntries(int size);
914 
915  int StackIdToIndex(Address fp);
916 
917  Isolate* isolate_;
918  List<Address> frame_fps_;
919 };
920 
921 
922 #ifdef ENABLE_DEBUGGER_SUPPORT
923 // Class used to represent an unoptimized frame when the debugger
924 // needs to inspect a frame that is part of an optimized frame. The
925 // internally used FrameDescription objects are not GC safe so for use
926 // by the debugger frame information is copied to an object of this type.
927 // Represents parameters in unadapted form so their number might mismatch
928 // formal parameter count.
929 class DeoptimizedFrameInfo : public Malloced {
930  public:
931  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
932  int frame_index,
933  bool has_arguments_adaptor,
934  bool has_construct_stub);
935  virtual ~DeoptimizedFrameInfo();
936 
937  // GC support.
938  void Iterate(ObjectVisitor* v);
939 
940  // Return the number of incoming arguments.
941  int parameters_count() { return parameters_count_; }
942 
943  // Return the height of the expression stack.
944  int expression_count() { return expression_count_; }
945 
946  // Get the frame function.
947  JSFunction* GetFunction() {
948  return function_;
949  }
950 
951  // Check if this frame is preceded by construct stub frame. The bottom-most
952  // inlined frame might still be called by an uninlined construct stub.
953  bool HasConstructStub() {
954  return has_construct_stub_;
955  }
956 
957  // Get an incoming argument.
958  Object* GetParameter(int index) {
959  ASSERT(0 <= index && index < parameters_count());
960  return parameters_[index];
961  }
962 
963  // Get an expression from the expression stack.
964  Object* GetExpression(int index) {
965  ASSERT(0 <= index && index < expression_count());
966  return expression_stack_[index];
967  }
968 
969  int GetSourcePosition() {
970  return source_position_;
971  }
972 
973  private:
974  // Set an incoming argument.
975  void SetParameter(int index, Object* obj) {
976  ASSERT(0 <= index && index < parameters_count());
977  parameters_[index] = obj;
978  }
979 
980  // Set an expression on the expression stack.
981  void SetExpression(int index, Object* obj) {
982  ASSERT(0 <= index && index < expression_count());
983  expression_stack_[index] = obj;
984  }
985 
986  JSFunction* function_;
987  bool has_construct_stub_;
988  int parameters_count_;
989  int expression_count_;
990  Object** parameters_;
991  Object** expression_stack_;
992  int source_position_;
993 
994  friend class Deoptimizer;
995 };
996 #endif
997 
998 } } // namespace v8::internal
999 
1000 #endif // V8_DEOPTIMIZER_H_
byte * Address
Definition: globals.h:186
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
void SetCallerPc(unsigned offset, intptr_t value)
Label label
Definition: deoptimizer.h:145
SlotRepresentation Representation()
Definition: deoptimizer.h:827
static const int kBailoutTypesWithCodeEntry
Definition: deoptimizer.h:135
static void EnsureCodeForDeoptimizationEntry(Isolate *isolate, BailoutType type, int max_entry_id)
Address address
Definition: deoptimizer.h:146
int duplicate_object() const
Definition: deoptimizer.h:91
Deoptimizer::BailoutType bailout_type
Definition: deoptimizer.h:147
unsigned GetOffsetFromSlotIndex(int slot_index)
static double & double_at(Address addr)
Definition: v8memory.h:75
void SetFrameSlot(unsigned offset, intptr_t value)
Definition: deoptimizer.h:503
static void ComputeOutputFrames(Deoptimizer *deoptimizer)
Definition: deoptimizer.cc:526
Address slot_address() const
Definition: deoptimizer.h:88
SlotRef(Isolate *isolate, Object *literal)
Definition: deoptimizer.h:810
static SlotRef NewDeferredObject(int length)
Definition: deoptimizer.h:820
BailoutType bailout_type() const
Definition: deoptimizer.h:159
kSerializedDataOffset Object
Definition: objects-inl.h:5016
T & at(int i) const
Definition: list.h:90
TypeImpl< ZoneTypeConfig > Type
int int32_t
Definition: unicode.cc:47
DeoptimizerData(MemoryAllocator *allocator)
Definition: deoptimizer.cc:57
Definition: deoptimizer.h:281
HeapNumberMaterializationDescriptor(T destination, double value)
Definition: deoptimizer.h:66
static const char * MessageFor(BailoutType type)
Definition: deoptimizer.cc:547
SlotRef(Address addr, SlotRepresentation representation)
Definition: deoptimizer.h:807
#define ASSERT(condition)
Definition: checks.h:329
static void DeoptimizeFunction(JSFunction *function)
Definition: deoptimizer.cc:514
void patch_slot_address(intptr_t slot)
Definition: deoptimizer.h:95
void SetConstantPool(intptr_t constant_pool)
Definition: deoptimizer.h:554
double GetDoubleRegister(unsigned n) const
Definition: deoptimizer.h:526
intptr_t GetContext() const
Definition: deoptimizer.h:550
void MaterializeHeapObjects(JavaScriptFrameIterator *it)
void SetFrameType(StackFrame::Type type)
Definition: deoptimizer.h:564
static bool TraceEnabledFor(BailoutType deopt_type, StackFrame::Type frame_type)
Definition: deoptimizer.cc:531
static SlotRef NewDuplicateObject(int id)
Definition: deoptimizer.h:829
static const int kNumRegisters
static int double_registers_offset()
Definition: deoptimizer.h:582
MacroAssembler * masm() const
Definition: deoptimizer.h:270
static void VisitAllOptimizedFunctions(Isolate *isolate, OptimizedFunctionVisitor *visitor)
Definition: deoptimizer.cc:312
JSFunction * GetFunction() const
Definition: deoptimizer.h:490
Handle< Code > compiled_code() const
Definition: deoptimizer.h:158
static void DeoptimizeGlobalObject(JSObject *object)
Definition: deoptimizer.cc:483
static int output_offset()
Definition: deoptimizer.h:250
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_string(expose_natives_as
Isolate * isolate() const
Definition: deoptimizer.h:303
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
TableEntryGenerator(MacroAssembler *masm, BailoutType type, int count)
Definition: deoptimizer.h:283
bool needs_frame
Definition: deoptimizer.h:148
#define OFFSET_OF(type, field)
Definition: globals.h:325
static Address GetDeoptimizationEntry(Isolate *isolate, int id, BailoutType type, GetEntryMode mode=ENSURE_ENTRY_CODE)
Definition: deoptimizer.cc:683
const int kPointerSize
Definition: globals.h:268
Translation(TranslationBuffer *buffer, int frame_count, int jsframe_count, Zone *zone)
Definition: deoptimizer.h:734
static int GetDeoptimizationId(Isolate *isolate, Address addr, BailoutType type)
Definition: deoptimizer.cc:701
EntryGenerator(MacroAssembler *masm, BailoutType type)
Definition: deoptimizer.h:263
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
void SetRegister(unsigned n, intptr_t value)
Definition: deoptimizer.h:531
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:359
static const int kMaxNumRegisters
const Register pc
static void DeoptimizeAll(Isolate *isolate)
Definition: deoptimizer.cc:450
static int GetDeoptimizedCodeCount(Isolate *isolate)
Definition: deoptimizer.cc:743
Definition: deoptimizer.h:137
static void EnsureRelocSpaceForLazyDeoptimization(Handle< Code > code)
static int GetOutputInfo(DeoptimizationOutputData *data, BailoutId node_id, SharedFunctionInfo *shared)
Definition: deoptimizer.cc:718
Definition: v8.h:123
bool is_arguments() const
Definition: deoptimizer.h:92
#define BASE_EMBEDDED
Definition: allocation.h:68
ObjectMaterializationDescriptor(Address slot_address, int frame, int length, int duplicate, bool is_args)
Definition: deoptimizer.h:80
#define LAST(x)
Definition: parser.h:277
void SetCallerConstantPool(unsigned offset, intptr_t value)
static SlotRef NewArgumentsObject(int length)
Definition: deoptimizer.h:813
uint32_t GetFrameSize() const
Definition: deoptimizer.h:485
#define T(name, string, precedence)
Definition: token.cc:48
void SetContinuation(intptr_t pc)
Definition: deoptimizer.h:561
JumpTableEntry(Address entry, Deoptimizer::BailoutType type, bool frame)
Definition: deoptimizer.h:138
static int output_count_offset()
Definition: deoptimizer.h:247
MaterializedObjectStore(Isolate *isolate)
Definition: deoptimizer.h:903
intptr_t GetFrameSlot(unsigned offset)
Definition: deoptimizer.h:494
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
friend class DeoptimizedFrameInfo
Definition: deoptimizer.h:462
TranslationIterator(ByteArray *buffer, int index)
Definition: deoptimizer.h:684
static Deoptimizer * Grab(Isolate *isolate)
Definition: deoptimizer.cc:138
static const int kNotDeoptimizationEntry
Definition: deoptimizer.h:258
friend class FrameDescription
Definition: deoptimizer.h:461
StackFrame::Type GetFrameType() const
Definition: deoptimizer.h:563
kInstanceClassNameOffset kNeedsAccessCheckBit kRemovePrototypeBit kIsExpressionBit kAllowLazyCompilation kUsesArguments formal_parameter_count
Definition: objects-inl.h:5190
#define DECLARE_TRANSLATION_OPCODE_ENUM(item)
Definition: deoptimizer.h:727
virtual void GeneratePrologue()
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:214
double GetDoubleFrameSlot(unsigned offset)
Definition: deoptimizer.h:498
FrameDescription(uint32_t frame_size, JSFunction *function)
void SetContext(intptr_t context)
Definition: deoptimizer.h:551
HeapObject * obj
static size_t GetMaxDeoptTableSize()
Definition: deoptimizer.cc:128
int ConvertJSFrameIndexToFrameIndex(int jsframe_index)
Definition: deoptimizer.cc:147
const Register fp
intptr_t GetRegister(unsigned n) const
Definition: deoptimizer.h:513
static void DeoptimizeMarkedCode(Isolate *isolate)
Definition: deoptimizer.cc:467
void SetDoubleRegister(unsigned n, double value)
Definition: deoptimizer.h:536
int jsframe_count() const
Definition: deoptimizer.h:162
void SetCallerFp(unsigned offset, intptr_t value)
#define ARRAY_SIZE(a)
Definition: globals.h:333
#define TRANSLATION_OPCODE_LIST(V)
Definition: deoptimizer.h:703
void SetTop(intptr_t top)
Definition: deoptimizer.h:542
static int has_alignment_padding_offset()
Definition: deoptimizer.h:252
static Deoptimizer * New(JSFunction *function, BailoutType type, unsigned bailout_id, Address from, int fp_to_sp_delta, Isolate *isolate)
Definition: deoptimizer.cc:105
intptr_t GetConstantPool() const
Definition: deoptimizer.h:553
Object * GetExpression(int index)
Object * GetParameter(int index)