v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.

lithium-allocator.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "v8.h"

#include "allocation.h"
#include "lithium.h"
#include "zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LPlatformChunk;
class LOperand;
class LUnallocated;
class LGap;
class LParallelMove;
class LPointerMap;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep / 2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of static member due to
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};
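// A LifetimePosition packs an instruction index and a start/end bit into a
// single integer: with kStep == 2, even values are instruction starts and
// odd values are instruction ends. The sketch below mirrors that arithmetic
// as a standalone program (illustrative only, not part of this header;
// extract and compile it separately to experiment with the encoding).
#if 0
#include <cassert>
#include <cstdio>

static const int kStep = 2;  // same power-of-two step as LifetimePosition

int FromInstructionIndex(int index) { return index * kStep; }
int InstructionIndex(int value) { return value / kStep; }
bool IsInstructionStart(int value) { return (value & (kStep - 1)) == 0; }
int InstructionEnd(int value) { return (value & ~(kStep - 1)) + kStep / 2; }

int main() {
  int start = FromInstructionIndex(3);  // 6: start of instruction 3
  int end = InstructionEnd(start);      // 7: end of instruction 3
  assert(IsInstructionStart(start) && !IsInstructionStart(end));
  assert(InstructionIndex(start) == 3 && InstructionIndex(end) == 3);
  std::printf("start=%d end=%d\n", start, end);
  return 0;
}
#endif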


enum RegisterKind {
  UNALLOCATED_REGISTERS,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// A register-allocator view of a Lithium instruction. It contains the id of
// the output operand and a list of input operand uses.

class LInstruction;
class LEnvironment;

// Iterator for non-null temp operands.
class TempIterator BASE_EMBEDDED {
 public:
  inline explicit TempIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator for non-constant input operands.
class InputIterator BASE_EMBEDDED {
 public:
  inline explicit InputIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


class UseIterator BASE_EMBEDDED {
 public:
  inline explicit UseIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  InputIterator input_iterator_;
  DeepIterator env_iterator_;
};


// Representation of the non-empty interval [start,end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};
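// Intersect() above relies on the intervals being half-open: after the swap
// puts the earlier-starting interval first, the two overlap exactly when the
// later one starts before the earlier one ends, and the overlap begins at
// the later start. A standalone sketch of that rule with a hypothetical
// plain-int Interval type (illustrative only, not part of this header):
#if 0
#include <cassert>

struct Interval {
  int start, end;  // half-open [start, end); requires start < end
  // Smallest position in both intervals, or -1 (a stand-in for
  // LifetimePosition::Invalid()) if they are disjoint.
  int Intersect(const Interval& other) const {
    if (other.start < start) return other.Intersect(*this);
    if (other.start < end) return other.start;
    return -1;
  }
};

int main() {
  Interval a{2, 8}, b{6, 12}, c{8, 10};
  assert(a.Intersect(b) == 6);   // overlap begins where b starts
  assert(b.Intersect(a) == 6);   // symmetric via the swap
  assert(a.Intersect(c) == -1);  // [2,8) and [8,10) touch but do not overlap
  return 0;
}
#endif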

// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand, LOperand* hint);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* const operand_;
  LOperand* const hint_;
  LifetimePosition const pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  LiveRange(int id, Zone* zone);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand(Zone* zone);
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, Zone* zone);
  void MakeSpilled(Zone* zone);

  // Returns use position in this live range that follows both start
  // and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Returns use position for which register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position which must follow the start
  // of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);

  RegisterKind Kind() const { return kind_; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }

  LOperand* current_hint_operand() const {
    ASSERT(current_hint_operand_ == FirstHint());
    return current_hint_operand_;
  }
  LOperand* FirstHint() const {
    UsePosition* pos = first_pos_;
    while (pos != NULL && !pos->HasHint()) pos = pos->next();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUseInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUsePosition(LifetimePosition pos,
                      LOperand* operand,
                      LOperand* hint,
                      Zone* zone);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands(Zone* zone);
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  RegisterKind kind_;
  int assigned_register_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  LOperand* current_hint_operand_;
  LOperand* spill_operand_;
  int spill_start_index_;

  friend class LAllocator;  // Assigns to kind_.
};
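// SplitAt() above cuts a live range in two: intervals before the position
// stay with the parent, intervals after it move to the child, and an
// interval straddling the position is cut. A standalone sketch of that
// splitting over a hypothetical vector-of-pairs range (illustrative only;
// the real class also moves use positions and links parent/next pointers):
#if 0
#include <cassert>
#include <utility>
#include <vector>

typedef std::vector<std::pair<int, int> > Intervals;  // half-open [start, end)

// Move everything at or after pos from range into the returned child.
Intervals SplitAt(Intervals& range, int pos) {
  Intervals child, kept;
  for (size_t i = 0; i < range.size(); ++i) {
    std::pair<int, int> iv = range[i];
    if (iv.second <= pos) {
      kept.push_back(iv);                             // entirely before pos
    } else if (iv.first >= pos) {
      child.push_back(iv);                            // entirely after pos
    } else {
      kept.push_back(std::make_pair(iv.first, pos));  // straddles: cut in two
      child.push_back(std::make_pair(pos, iv.second));
    }
  }
  range = kept;
  return child;
}

int main() {
  Intervals r;
  r.push_back(std::make_pair(0, 4));
  r.push_back(std::make_pair(6, 10));
  Intervals child = SplitAt(r, 8);                    // cuts [6,10) at 8
  assert(r.size() == 2 && r[1].second == 8);          // parent keeps [6,8)
  assert(child.size() == 1 && child[0].first == 8);   // child gets [8,10)
  return 0;
}
#endif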


class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  bool Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }
  Isolate* isolate() const { return graph_->isolate(); }
  Zone* zone() { return &zone_; }

  int GetVirtualRegister() {
    if (next_virtual_register_ >= LUnallocated::kMaxVirtualRegisters) {
      allocation_ok_ = false;
      // Maintain the invariant that we return something below the maximum.
      return 0;
    }
    return next_virtual_register_++;
  }

  bool AllocationOk() { return allocation_ok_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

  BitVector* assigned_registers() {
    return assigned_registers_;
  }
  BitVector* assigned_double_registers() {
    return assigned_double_registers_;
  }

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range,
                         LifetimePosition start,
                         LifetimePosition until,
                         LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  // If we are trying to spill a range inside the loop try to
  // hoist spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  inline void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Return parallel move that should be used to connect ranges split at the
  // given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  Zone zone_;

  LPlatformChunk* chunk_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  EmbeddedVector<LiveRange*, Register::kMaxNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kMaxNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  // Indicates success or failure during register allocation.
  bool allocation_ok_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};
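// The unhandled/active/inactive lists above implement a linear scan: ranges
// are processed in start order, active ranges whose end has passed release
// their register, and a free register is tried first. A toy standalone
// sketch of just that worklist (illustrative only; it omits splitting,
// spilling heuristics, fixed ranges, and the inactive set the real
// allocator uses for lifetime holes):
#if 0
#include <algorithm>
#include <cassert>
#include <vector>

struct ToyRange { int start, end, reg; };  // half-open [start, end)

void Allocate(std::vector<ToyRange>& ranges, int num_registers) {
  std::sort(ranges.begin(), ranges.end(),
            [](const ToyRange& a, const ToyRange& b) {
              return a.start < b.start;  // unhandled, sorted by start
            });
  std::vector<ToyRange*> active;
  std::vector<bool> free_reg(num_registers, true);
  for (size_t i = 0; i < ranges.size(); ++i) {
    ToyRange& r = ranges[i];
    // Expire active ranges ending at or before r's start (ActiveToHandled).
    for (size_t j = 0; j < active.size();) {
      if (active[j]->end <= r.start) {
        free_reg[active[j]->reg] = true;
        active.erase(active.begin() + j);
      } else {
        ++j;
      }
    }
    r.reg = -1;  // -1 stands in for "spilled" in this toy
    for (int reg = 0; reg < num_registers; ++reg) {
      if (free_reg[reg]) {  // TryAllocateFreeReg, without splitting
        free_reg[reg] = false;
        r.reg = reg;
        active.push_back(&r);
        break;
      }
    }
  }
}

int main() {
  std::vector<ToyRange> ranges = {{0, 10, -1}, {2, 6, -1}, {7, 12, -1}};
  Allocate(ranges, 2);
  assert(ranges[0].reg != ranges[1].reg);  // overlapping ranges differ
  assert(ranges[2].reg == ranges[1].reg);  // [2,6) expired before [7,12)
  return 0;
}
#endif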


class LAllocatorPhase : public CompilationPhase {
 public:
  LAllocatorPhase(const char* name, LAllocator* allocator);
  ~LAllocatorPhase();

 private:
  LAllocator* allocator_;
  unsigned allocator_zone_start_allocation_size_;

  DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_