V8 3.25.30 (Node 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_LITHIUM_H_
29 #define V8_LITHIUM_H_
30 
31 #include "allocation.h"
32 #include "hydrogen.h"
33 #include "safepoint-table.h"
34 
35 namespace v8 {
36 namespace internal {
37 
// X-macro enumerating the concrete (already-allocated) operand kinds.
// Columns: V(class-name suffix, LOperand::Kind value, static cache size
// used by LSubKindOperand<kind, size>).
38 #define LITHIUM_OPERAND_LIST(V) \
39  V(ConstantOperand, CONSTANT_OPERAND, 128) \
40  V(StackSlot, STACK_SLOT, 128) \
41  V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
42  V(Register, REGISTER, 16) \
43  V(DoubleRegister, DOUBLE_REGISTER, 16)
44 
45 
// Base class for all Lithium operands. The state is a single unsigned
// word: the low kKindFieldWidth bits hold the Kind, the remaining high
// bits hold the index (see ConvertTo/index()).
46 class LOperand : public ZoneObject {
47  public:
48  enum Kind {
// NOTE(review): the Kind enumerators are elided in this extract
// (INVALID/UNALLOCATED plus the kinds in LITHIUM_OPERAND_LIST,
// per the uses below) — confirm against the full header.
56  };
57 
// Default constructor yields an INVALID operand (index 0).
58  LOperand() : value_(KindField::encode(INVALID)) { }
59 
60  Kind kind() const { return KindField::decode(value_); }
// Arithmetic shift: index is stored in the bits above the kind field.
61  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
// Generates one Is<Kind>() predicate per operand kind.
62 #define LITHIUM_OPERAND_PREDICATE(name, type, number) \
63  bool Is##name() const { return kind() == type; }
// NOTE(review): the LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
// expansion line is elided in this extract.
65  LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
67 #undef LITHIUM_OPERAND_PREDICATE
// Whole-word comparison: equal kind AND equal index.
68  bool Equals(LOperand* other) const { return value_ == other->value_; }
69 
70  void PrintTo(StringStream* stream);
// Re-encodes this operand in place with a new kind/index pair.
// The ASSERT guards against index overflow into the kind bits.
71  void ConvertTo(Kind kind, int index) {
72  value_ = KindField::encode(kind);
73  value_ |= index << kKindFieldWidth;
74  ASSERT(this->index() == index);
75  }
76 
77  // Calls SetUpCache()/TearDownCache() for each subclass.
78  static void SetUpCaches();
79  static void TearDownCaches();
80 
81  protected:
82  static const int kKindFieldWidth = 3;
83  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
84 
85  LOperand(Kind kind, int index) { ConvertTo(kind, index); }
86 
// Packed kind+index representation described above.
87  unsigned value_;
88 };
89 
90 
// An operand that has not yet been assigned a register or stack slot by
// the register allocator. Carries an allocation policy, a virtual
// register id and (for non-FIXED_SLOT policies) a lifetime, all packed
// into the inherited value_ word via the BitFields declared below.
// NOTE(review): numerous lines of this class are elided in this
// doxygen extract (enumerator lists, constructor signatures and several
// accessor bodies) — consult the full lithium.h before relying on the
// exact encodings.
91 class LUnallocated : public LOperand {
92  public:
// Coarse policy split; selects which bit layout is in effect.
93  enum BasicPolicy {
// NOTE(review): enumerators elided (FIXED_SLOT / EXTENDED_POLICY,
// per the uses below) — confirm.
96  };
97 
// NOTE(review): the ExtendedPolicy enum declaration is elided here.
106  };
107 
108  // Lifetime of operand inside the instruction.
109  enum Lifetime {
110  // USED_AT_START operand is guaranteed to be live only at
111  // instruction start. Register allocator is free to assign the same register
112  // to some other operand used inside instruction (i.e. temporary or
113  // output).
115 
116  // USED_AT_END operand is treated as live until the end of
117  // instruction. This means that register allocator will not reuse it's
118  // register for any other operand inside instruction.
120  };
121 
// NOTE(review): several constructor signatures are elided below; the
// visible bodies encode policy/index/lifetime into value_.
126  }
127 
129  ASSERT(policy == FIXED_SLOT);
130  value_ |= BasicPolicyField::encode(policy);
132  ASSERT(this->fixed_slot_index() == index);
133  }
134 
136  ASSERT(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
141  }
142 
144  : LOperand(UNALLOCATED, 0) {
147  value_ |= LifetimeField::encode(lifetime);
148  }
149 
// Makes an unconstrained (ANY policy) copy sharing the virtual register.
151  LUnallocated* result = new(zone) LUnallocated(ANY);
153  return result;
154  }
155 
// Checked downcast; only valid on operands with kind UNALLOCATED.
156  static LUnallocated* cast(LOperand* op) {
157  ASSERT(op->IsUnallocated());
158  return reinterpret_cast<LUnallocated*>(op);
159  }
160 
161  // The encoding used for LUnallocated operands depends on the policy that is
162  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
163  // because it accommodates a larger pay-load.
164  //
165  // For FIXED_SLOT policy:
166  // +------------------------------------------+
167  // | slot_index | vreg | 0 | 001 |
168  // +------------------------------------------+
169  //
170  // For all other (extended) policies:
171  // +------------------------------------------+
172  // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime
173  // +------------------------------------------+ P ... Policy
174  //
175  // The slot index is a signed value which requires us to decode it manually
176  // instead of using the BitField utility class.
177 
178  // The superclass has a KindField.
180 
181  // BitFields for all unallocated operands.
182  class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {};
183  class VirtualRegisterField : public BitField<unsigned, 4, 18> {};
184 
185  // BitFields specific to BasicPolicy::FIXED_SLOT.
186  class FixedSlotIndexField : public BitField<int, 22, 10> {};
187 
188  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
189  class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {};
190  class LifetimeField : public BitField<Lifetime, 25, 1> {};
191  class FixedRegisterField : public BitField<int, 26, 6> {};
192 
// Signed 10-bit slot index range derived from kFixedSlotIndexWidth.
195  static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
196  static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
197 
198  // Predicates for the operand policy.
199  bool HasAnyPolicy() const {
200  return basic_policy() == EXTENDED_POLICY &&
201  extended_policy() == ANY;
202  }
203  bool HasFixedPolicy() const {
204  return basic_policy() == FIXED_SLOT ||
207  }
208  bool HasRegisterPolicy() const {
209  return basic_policy() == EXTENDED_POLICY && (
212  }
213  bool HasSameAsInputPolicy() const {
214  return basic_policy() == EXTENDED_POLICY &&
216  }
217  bool HasFixedSlotPolicy() const {
218  return basic_policy() == FIXED_SLOT;
219  }
220  bool HasFixedRegisterPolicy() const {
221  return basic_policy() == EXTENDED_POLICY &&
223  }
225  return basic_policy() == EXTENDED_POLICY &&
227  }
229  return basic_policy() == EXTENDED_POLICY &&
231  }
232 
233  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
236  }
237 
238  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
242  }
243 
244  // [fixed_slot_index]: Only for FIXED_SLOT.
245  int fixed_slot_index() const {
// Manual arithmetic shift to preserve the sign of the slot index.
247  return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
248  }
249 
250  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
251  int fixed_register_index() const {
254  }
255 
256  // [virtual_register]: The virtual register ID for this operand.
257  int virtual_register() const {
259  }
260  void set_virtual_register(unsigned id) {
262  }
263 
264  // [lifetime]: Only for non-FIXED_SLOT.
265  bool IsUsedAtStart() {
268  }
269 };
270 
271 
272 class LMoveOperands V8_FINAL BASE_EMBEDDED {
273  public:
274  LMoveOperands(LOperand* source, LOperand* destination)
275  : source_(source), destination_(destination) {
276  }
277 
278  LOperand* source() const { return source_; }
279  void set_source(LOperand* operand) { source_ = operand; }
280 
281  LOperand* destination() const { return destination_; }
282  void set_destination(LOperand* operand) { destination_ = operand; }
283 
284  // The gap resolver marks moves as "in-progress" by clearing the
285  // destination (but not the source).
286  bool IsPending() const {
287  return destination_ == NULL && source_ != NULL;
288  }
289 
290  // True if this move a move into the given destination operand.
291  bool Blocks(LOperand* operand) const {
292  return !IsEliminated() && source()->Equals(operand);
293  }
294 
295  // A move is redundant if it's been eliminated, if its source and
296  // destination are the same, or if its destination is unneeded.
297  bool IsRedundant() const {
298  return IsEliminated() || source_->Equals(destination_) || IsIgnored();
299  }
300 
301  bool IsIgnored() const {
302  return destination_ != NULL && destination_->IsIgnored();
303  }
304 
305  // We clear both operands to indicate move that's been eliminated.
306  void Eliminate() { source_ = destination_ = NULL; }
307  bool IsEliminated() const {
308  ASSERT(source_ != NULL || destination_ == NULL);
309  return source_ == NULL;
310  }
311 
312  private:
313  LOperand* source_;
314  LOperand* destination_;
315 };
316 
317 
318 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
319 class LSubKindOperand V8_FINAL : public LOperand {
320  public:
321  static LSubKindOperand* Create(int index, Zone* zone) {
322  ASSERT(index >= 0);
323  if (index < kNumCachedOperands) return &cache[index];
324  return new(zone) LSubKindOperand(index);
325  }
326 
327  static LSubKindOperand* cast(LOperand* op) {
328  ASSERT(op->kind() == kOperandKind);
329  return reinterpret_cast<LSubKindOperand*>(op);
330  }
331 
332  static void SetUpCache();
333  static void TearDownCache();
334 
335  private:
336  static LSubKindOperand* cache;
337 
338  LSubKindOperand() : LOperand() { }
339  explicit LSubKindOperand(int index) : LOperand(kOperandKind, index) { }
340 };
341 
342 
// Produces the short names (LConstantOperand, LStackSlot, ...) for each
// LSubKindOperand instantiation.
// NOTE(review): the LITHIUM_OPERAND_LIST(...) invocation line between
// the #define and the #undef is elided in this extract.
343 #define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
344 typedef LSubKindOperand<LOperand::type, number> L##name;
346 #undef LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS
347 
348 
// An ordered set of moves that conceptually execute in parallel at a
// gap position; resolved into a sequential schedule by the gap resolver.
349 class LParallelMove V8_FINAL : public ZoneObject {
350  public:
// Starts with capacity for 4 moves in the given zone.
351  explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
352 
353  void AddMove(LOperand* from, LOperand* to, Zone* zone) {
354  move_operands_.Add(LMoveOperands(from, to), zone);
355  }
356 
// True if every contained move is redundant (defined in lithium.cc).
357  bool IsRedundant() const;
358 
// NOTE(review): the accessor signature (line 359) is elided in this
// extract; per the cross-reference it is
// const ZoneList<LMoveOperands>* move_operands() const — confirm.
360  return &move_operands_;
361  }
362 
363  void PrintDataTo(StringStream* stream) const;
364 
365  private:
366  ZoneList<LMoveOperands> move_operands_;
367 };
368 
369 
// Records which operands hold tagged pointers at a given lithium
// position, for the GC safepoint tables. Untagged operands recorded via
// RecordUntagged() are subtracted from the pointer set on normalization.
370 class LPointerMap V8_FINAL : public ZoneObject {
371  public:
372  explicit LPointerMap(Zone* zone)
373  : pointer_operands_(8, zone),
374  untagged_operands_(0, zone),
// -1 marks "position not yet assigned"; see set_lithium_position().
375  lithium_position_(-1) { }
376 
// NOTE(review): the signature (line 377) is elided in this extract; per
// the cross-reference it is
// const ZoneList<LOperand*>* GetNormalizedOperands() — confirm.
// Removes every recorded untagged operand from the pointer set, then
// returns the remaining (normalized) pointer operands.
378  for (int i = 0; i < untagged_operands_.length(); ++i) {
379  RemovePointer(untagged_operands_[i]);
380  }
381  untagged_operands_.Clear();
382  return &pointer_operands_;
383  }
384  int lithium_position() const { return lithium_position_; }
385 
// May only be set once (asserted below).
386  void set_lithium_position(int pos) {
387  ASSERT(lithium_position_ == -1);
388  lithium_position_ = pos;
389  }
390 
391  void RecordPointer(LOperand* op, Zone* zone);
392  void RemovePointer(LOperand* op);
393  void RecordUntagged(LOperand* op, Zone* zone);
394  void PrintTo(StringStream* stream);
395 
396  private:
397  ZoneList<LOperand*> pointer_operands_;
398  ZoneList<LOperand*> untagged_operands_;
399  int lithium_position_;
400 };
401 
402 
// Describes the full deoptimization state at one point in optimized
// code: the closure/frame being executed, all live values (parameters,
// locals, expression stack, de-materialized objects), and, once
// Register()-ed, the indices needed to emit the deopt translation.
403 class LEnvironment V8_FINAL : public ZoneObject {
404  public:
// NOTE(review): the first constructor line (405) is elided in this
// extract; per the cross-reference the full signature starts with
// LEnvironment(Handle<JSFunction> closure, ... — confirm.
406  FrameType frame_type,
407  BailoutId ast_id,
408  int parameter_count,
409  int argument_count,
410  int value_count,
411  LEnvironment* outer,
412  HEnterInlined* entry,
413  Zone* zone)
414  : closure_(closure),
415  frame_type_(frame_type),
416  arguments_stack_height_(argument_count),
// kNoDeoptimizationIndex / -1 sentinels mean "not yet registered";
// see Register() and HasBeenRegistered() below.
417  deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
418  translation_index_(-1),
419  ast_id_(ast_id),
420  translation_size_(value_count),
421  parameter_count_(parameter_count),
422  pc_offset_(-1),
423  values_(value_count, zone),
424  is_tagged_(value_count, zone),
425  is_uint32_(value_count, zone),
426  object_mapping_(0, zone),
427  outer_(outer),
428  entry_(entry),
429  zone_(zone) { }
430 
431  Handle<JSFunction> closure() const { return closure_; }
432  FrameType frame_type() const { return frame_type_; }
433  int arguments_stack_height() const { return arguments_stack_height_; }
434  int deoptimization_index() const { return deoptimization_index_; }
435  int translation_index() const { return translation_index_; }
436  BailoutId ast_id() const { return ast_id_; }
437  int translation_size() const { return translation_size_; }
438  int parameter_count() const { return parameter_count_; }
439  int pc_offset() const { return pc_offset_; }
440  const ZoneList<LOperand*>* values() const { return &values_; }
441  LEnvironment* outer() const { return outer_; }
442  HEnterInlined* entry() { return entry_; }
443  Zone* zone() const { return zone_; }
444 
// Appends a value and records its representation in the side bit
// vectors (tagged/smi vs uint32) keyed by its index in values_.
445  void AddValue(LOperand* operand,
446  Representation representation,
447  bool is_uint32) {
448  values_.Add(operand, zone());
449  if (representation.IsSmiOrTagged()) {
// A value cannot be both tagged and uint32.
450  ASSERT(!is_uint32);
451  is_tagged_.Add(values_.length() - 1, zone());
452  }
453 
454  if (is_uint32) {
455  is_uint32_.Add(values_.length() - 1, zone());
456  }
457  }
458 
459  bool HasTaggedValueAt(int index) const {
460  return is_tagged_.Contains(index);
461  }
462 
463  bool HasUint32ValueAt(int index) const {
464  return is_uint32_.Contains(index);
465  }
466 
// Records a to-be-materialized object of |length| fields (or an
// arguments object) in the object mapping.
467  void AddNewObject(int length, bool is_arguments) {
468  uint32_t encoded = LengthOrDupeField::encode(length) |
469  IsArgumentsField::encode(is_arguments) |
470  IsDuplicateField::encode(false);
471  object_mapping_.Add(encoded, zone());
472  }
473 
// Records a back-reference to a previously added object at |dupe_of|.
474  void AddDuplicateObject(int dupe_of) {
475  uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
476  IsDuplicateField::encode(true);
477  object_mapping_.Add(encoded, zone());
478  }
479 
480  int ObjectDuplicateOfAt(int index) {
481  ASSERT(ObjectIsDuplicateAt(index));
482  return LengthOrDupeField::decode(object_mapping_[index]);
483  }
484 
485  int ObjectLengthAt(int index) {
486  ASSERT(!ObjectIsDuplicateAt(index));
487  return LengthOrDupeField::decode(object_mapping_[index]);
488  }
489 
490  bool ObjectIsArgumentsAt(int index) {
491  ASSERT(!ObjectIsDuplicateAt(index));
492  return IsArgumentsField::decode(object_mapping_[index]);
493  }
494 
495  bool ObjectIsDuplicateAt(int index) {
496  return IsDuplicateField::decode(object_mapping_[index]);
497  }
498 
// Fills in the deopt bookkeeping; may only happen once per environment.
499  void Register(int deoptimization_index,
500  int translation_index,
501  int pc_offset) {
502  ASSERT(!HasBeenRegistered());
503  deoptimization_index_ = deoptimization_index;
504  translation_index_ = translation_index;
505  pc_offset_ = pc_offset;
506  }
507  bool HasBeenRegistered() const {
508  return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
509  }
510 
511  void PrintTo(StringStream* stream);
512 
513  // Marker value indicating a de-materialized object.
514  static LOperand* materialization_marker() { return NULL; }
515 
516  // Encoding used for the object_mapping map below.
517  class LengthOrDupeField : public BitField<int, 0, 30> { };
518  class IsArgumentsField : public BitField<bool, 30, 1> { };
519  class IsDuplicateField : public BitField<bool, 31, 1> { };
520 
521  private:
522  Handle<JSFunction> closure_;
523  FrameType frame_type_;
524  int arguments_stack_height_;
525  int deoptimization_index_;
526  int translation_index_;
527  BailoutId ast_id_;
528  int translation_size_;
529  int parameter_count_;
530  int pc_offset_;
531 
532  // Value array: [parameters] [locals] [expression stack] [de-materialized].
533  // |>--------- translation_size ---------<|
534  ZoneList<LOperand*> values_;
535  GrowableBitVector is_tagged_;
536  GrowableBitVector is_uint32_;
537 
538  // Map with encoded information about materialization_marker operands.
539  ZoneList<uint32_t> object_mapping_;
540 
541  LEnvironment* outer_;
542  HEnterInlined* entry_;
543  Zone* zone_;
544 };
545 
546 
547 // Iterates over the non-null, non-constant operands in an environment.
// Iterates over the non-NULL, non-constant operands of a single
// LEnvironment (does not descend into outer environments).
548 class ShallowIterator V8_FINAL BASE_EMBEDDED {
549  public:
// A NULL environment yields an immediately-done iterator (limit 0).
550  explicit ShallowIterator(LEnvironment* env)
551  : env_(env),
552  limit_(env != NULL ? env->values()->length() : 0),
553  current_(0) {
554  SkipUninteresting();
555  }
556 
557  bool Done() { return current_ >= limit_; }
558 
// NOTE(review): the accessor signature (line 559) is elided in this
// extract; per the structure it is LOperand* Current() — confirm.
560  ASSERT(!Done());
561  ASSERT(env_->values()->at(current_) != NULL);
562  return env_->values()->at(current_);
563  }
564 
565  void Advance() {
566  ASSERT(!Done());
567  ++current_;
568  SkipUninteresting();
569  }
570 
571  LEnvironment* env() { return env_; }
572 
573  private:
// "Uninteresting" operands: holes and constants need no allocation.
574  bool ShouldSkip(LOperand* op) {
575  return op == NULL || op->IsConstantOperand();
576  }
577 
578  // Skip until something interesting, beginning with and including current_.
579  void SkipUninteresting() {
580  while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
581  ++current_;
582  }
583  }
584 
585  LEnvironment* env_;
586  int limit_;
587  int current_;
588 };
589 
590 
591 // Iterator for non-null, non-constant operands incl. outer environments.
// Like ShallowIterator, but walks the whole chain of outer
// environments: when the current environment is exhausted it moves on
// to env->outer(), until no environments remain.
592 class DeepIterator V8_FINAL BASE_EMBEDDED {
593  public:
594  explicit DeepIterator(LEnvironment* env)
595  : current_iterator_(env) {
596  SkipUninteresting();
597  }
598 
599  bool Done() { return current_iterator_.Done(); }
600 
// NOTE(review): the accessor signature (line 601) is elided in this
// extract; per the structure it is LOperand* Current() — confirm.
602  ASSERT(!current_iterator_.Done());
603  ASSERT(current_iterator_.Current() != NULL);
604  return current_iterator_.Current();
605  }
606 
607  void Advance() {
608  current_iterator_.Advance();
609  SkipUninteresting();
610  }
611 
612  private:
// Hops to the next non-exhausted environment in the outer chain.
613  void SkipUninteresting() {
614  while (current_iterator_.env() != NULL && current_iterator_.Done()) {
615  current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
616  }
617  }
618 
619  ShallowIterator current_iterator_;
620 };
621 
622 
623 class LPlatformChunk;
624 class LGap;
625 class LLabel;
626 
627 // Superclass providing data and behavior common to all the
628 // arch-specific LPlatformChunk classes.
// Container for the lithium instructions produced from one HGraph,
// together with their gap moves, pointer maps, spill-slot accounting
// and inlined closures. Arch-specific behavior lives in LPlatformChunk.
629 class LChunk : public ZoneObject {
630  public:
// Factory: builds and register-allocates a chunk for |graph|.
631  static LChunk* NewChunk(HGraph* graph);
632 
633  void AddInstruction(LInstruction* instruction, HBasicBlock* block);
634  LConstantOperand* DefineConstantOperand(HConstant* constant);
635  HConstant* LookupConstant(LConstantOperand* operand) const;
636  Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
637 
638  int ParameterAt(int index);
639  int GetParameterStackSlot(int index) const;
640  int spill_slot_count() const { return spill_slot_count_; }
641  CompilationInfo* info() const { return info_; }
642  HGraph* graph() const { return graph_; }
643  Isolate* isolate() const { return graph_->isolate(); }
644  const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
645  void AddGapMove(int index, LOperand* from, LOperand* to);
646  LGap* GetGapAt(int index) const;
647  bool IsGapAt(int index) const;
648  int NearestGapPos(int index) const;
649  void MarkEmptyBlocks();
650  const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
651  LLabel* GetLabel(int block_id) const;
652  int LookupDestination(int block_id) const;
653  Label* GetAssemblyLabel(int block_id) const;
654 
// NOTE(review): the accessor signature (line 655) is elided; per the
// cross-reference it is
// const ZoneList<Handle<JSFunction> >* inlined_closures() const.
656  return &inlined_closures_;
657  }
658 
// NOTE(review): signature (line 659) elided; per the cross-reference it
// is void AddInlinedClosure(Handle<JSFunction> closure).
660  inlined_closures_.Add(closure, zone());
661  }
662 
663  Zone* zone() const { return info_->zone(); }
664 
// NOTE(review): a declaration (line 665, Handle<Code> Codegen() per the
// cross-reference) is elided here.
666 
667  void set_allocated_double_registers(BitVector* allocated_registers);
// NOTE(review): the accessor signature (line 668,
// BitVector* allocated_double_registers()) is elided here.
669  return allocated_double_registers_;
670  }
671 
672  protected:
673  LChunk(CompilationInfo* info, HGraph* graph);
674 
// NOTE(review): a protected member declaration (line 675, presumably
// int spill_slot_count_; given the accessor above) is elided — confirm.
676 
677  private:
678  CompilationInfo* info_;
679  HGraph* const graph_;
680  BitVector* allocated_double_registers_;
681  ZoneList<LInstruction*> instructions_;
682  ZoneList<LPointerMap*> pointer_maps_;
683  ZoneList<Handle<JSFunction> > inlined_closures_;
684 };
685 
686 
// Abstract base for the per-architecture LChunkBuilder classes:
// provides environment construction and object-materialization
// bookkeeping shared by all backends.
687 class LChunkBuilderBase BASE_EMBEDDED {
688  public:
689  explicit LChunkBuilderBase(Zone* zone)
690  : argument_count_(0),
691  zone_(zone) { }
692 
693  virtual ~LChunkBuilderBase() { }
694 
695  protected:
696  // An input operand in register, stack slot or a constant operand.
697  // Will not be moved to a register even if one is freely available.
698  virtual MUST_USE_RESULT LOperand* UseAny(HValue* value) = 0;
699 
// Builds the LEnvironment chain mirroring |hydrogen_env| (lithium.cc).
700  LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
701  int* argument_index_accumulator,
702  ZoneList<HValue*>* objects_to_materialize);
703  void AddObjectToMaterialize(HValue* value,
704  ZoneList<HValue*>* objects_to_materialize,
705  LEnvironment* result);
706 
707  Zone* zone() const { return zone_; }
708 
// NOTE(review): a protected member declaration (line 709, presumably
// int argument_count_; given the initializer above) is elided — confirm.
710 
711  private:
712  Zone* zone_;
713 };
714 
715 
// Maps a spill-slot index to its frame offset (defined in lithium.cc).
716 int StackSlotOffset(int index);
717 
// NOTE(review): an enum declaration (lines 718-720) is elided in this
// extract; only its closing brace survives below — confirm against the
// full header.
721 };
722 
723 
724 class LPhase : public CompilationPhase {
725  public:
726  LPhase(const char* name, LChunk* chunk)
727  : CompilationPhase(name, chunk->info()),
728  chunk_(chunk) { }
729  ~LPhase();
730 
731  private:
732  LChunk* chunk_;
733 
734  DISALLOW_COPY_AND_ASSIGN(LPhase);
735 };
736 
737 
738 } } // namespace v8::internal
739 
740 #endif // V8_LITHIUM_H_
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
int index() const
Definition: lithium.h:61
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:156
bool IsPending() const
Definition: lithium.h:286
void set_lithium_position(int pos)
Definition: lithium.h:386
bool IsRedundant() const
Definition: lithium.h:297
#define LITHIUM_OPERAND_PREDICATE(name, type, number)
Definition: lithium.h:62
int GetParameterStackSlot(int index) const
Definition: lithium.cc:345
uint16_t current_
BitVector * allocated_double_registers()
Definition: lithium.h:668
Zone * zone() const
Definition: lithium.h:707
DeepIterator(LEnvironment *env)
Definition: lithium.h:594
STATIC_ASSERT(kKindFieldWidth==3)
int fixed_slot_index() const
Definition: lithium.h:245
int translation_size() const
Definition: lithium.h:437
static void TearDownCaches()
Definition: lithium.cc:146
BailoutId ast_id() const
Definition: lithium.h:436
int StackSlotOffset(int index)
Definition: lithium.cc:240
bool HasFixedSlotPolicy() const
Definition: lithium.h:217
Zone * zone() const
Definition: lithium.h:443
static LOperand * materialization_marker()
Definition: lithium.h:514
LPointerMap(Zone *zone)
Definition: lithium.h:372
void AddDuplicateObject(int dupe_of)
Definition: lithium.h:474
const ZoneList< LOperand * > * GetNormalizedOperands()
Definition: lithium.h:377
int ParameterAt(int index)
Definition: lithium.cc:358
LOperand(Kind kind, int index)
Definition: lithium.h:85
LChunkBuilderBase(Zone *zone)
Definition: lithium.h:689
LUnallocated * CopyUnconstrained(Zone *zone)
Definition: lithium.h:150
bool HasFixedRegisterPolicy() const
Definition: lithium.h:220
bool HasUint32ValueAt(int index) const
Definition: lithium.h:463
LPhase(const char *name, LChunk *chunk)
Definition: lithium.h:726
LEnvironment(Handle< JSFunction > closure, FrameType frame_type, BailoutId ast_id, int parameter_count, int argument_count, int value_count, LEnvironment *outer, HEnterInlined *entry, Zone *zone)
Definition: lithium.h:405
#define LITHIUM_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number)
Definition: lithium.h:343
int arguments_stack_height() const
Definition: lithium.h:433
int translation_index() const
Definition: lithium.h:435
LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
Definition: lithium.h:143
LLabel * GetLabel(int block_id) const
Definition: lithium.cc:263
const ZoneList< LMoveOperands > * move_operands() const
Definition: lithium.h:359
Handle< Code > Codegen()
Definition: lithium.cc:425
#define ASSERT(condition)
Definition: checks.h:329
bool HasSameAsInputPolicy() const
Definition: lithium.h:213
static void SetUpCaches()
Definition: lithium.cc:139
bool HasRegisterPolicy() const
Definition: lithium.h:208
bool ObjectIsArgumentsAt(int index)
Definition: lithium.h:490
void AddMove(LOperand *from, LOperand *to, Zone *zone)
Definition: lithium.h:353
LChunk(CompilationInfo *info, HGraph *graph)
Definition: lithium.cc:253
static const int kMaxFixedSlotIndex
Definition: lithium.h:195
bool Equals(LOperand *other) const
Definition: lithium.h:68
FrameType frame_type() const
Definition: lithium.h:432
LOperand * destination() const
Definition: lithium.h:281
LGap * GetGapAt(int index) const
Definition: lithium.cc:365
const ZoneList< LOperand * > * values() const
Definition: lithium.h:440
void AddValue(LOperand *operand, Representation representation, bool is_uint32)
Definition: lithium.h:445
static const int kFixedSlotIndexWidth
Definition: lithium.h:194
void set_virtual_register(unsigned id)
Definition: lithium.h:260
bool HasBeenRegistered() const
Definition: lithium.h:507
int virtual_register() const
Definition: lithium.h:257
static const int kMaxVirtualRegisters
Definition: lithium.h:193
void set_source(LOperand *operand)
Definition: lithium.h:279
void ConvertTo(Kind kind, int index)
Definition: lithium.h:71
#define MUST_USE_RESULT
Definition: globals.h:381
LMoveOperands(LOperand *source, LOperand *destination)
Definition: lithium.h:274
static uint32_t update(uint32_t previous, unsigned value)
Definition: utils.h:296
int spill_slot_count() const
Definition: lithium.h:640
Kind kind() const
Definition: lithium.h:60
bool HasTaggedValueAt(int index) const
Definition: lithium.h:459
bool Blocks(LOperand *operand) const
Definition: lithium.h:291
void set_allocated_double_registers(BitVector *allocated_registers)
Definition: lithium.cc:454
BasicPolicy basic_policy() const
Definition: lithium.h:234
static LChunk * NewChunk(HGraph *graph)
Definition: lithium.cc:398
Zone * zone() const
Definition: lithium.h:663
HEnterInlined * entry()
Definition: lithium.h:442
LEnvironment * env()
Definition: lithium.h:571
bool IsIgnored() const
Definition: lithium.h:301
#define BASE_EMBEDDED
Definition: allocation.h:68
void Register(int deoptimization_index, int translation_index, int pc_offset)
Definition: lithium.h:499
int fixed_register_index() const
Definition: lithium.h:251
int ObjectDuplicateOfAt(int index)
Definition: lithium.h:480
bool IsGapAt(int index) const
Definition: lithium.cc:370
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
Definition: lithium.cc:320
HGraph * graph() const
Definition: lithium.h:642
ShallowIterator(LEnvironment *env)
Definition: lithium.h:550
const ZoneList< Handle< JSFunction > > * inlined_closures() const
Definition: lithium.h:655
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
CompilationInfo * info() const
Definition: lithium.h:641
int parameter_count() const
Definition: lithium.h:438
LEnvironment * outer() const
Definition: lithium.h:441
virtual ~LChunkBuilderBase()
Definition: lithium.h:693
bool ObjectIsDuplicateAt(int index)
Definition: lithium.h:495
void AddGapMove(int index, LOperand *from, LOperand *to)
Definition: lithium.cc:381
Isolate * isolate() const
Definition: lithium.h:643
const ZoneList< LPointerMap * > * pointer_maps() const
Definition: lithium.h:650
int lithium_position() const
Definition: lithium.h:384
LConstantOperand * DefineConstantOperand(HConstant *constant)
Definition: lithium.cc:340
bool HasFixedPolicy() const
Definition: lithium.h:203
LOperand * source() const
Definition: lithium.h:278
bool IsEliminated() const
Definition: lithium.h:307
static LSubKindOperand * cast(LOperand *op)
Definition: lithium.h:327
LUnallocated(ExtendedPolicy policy, int index)
Definition: lithium.h:135
void MarkEmptyBlocks()
Definition: lithium.cc:285
bool HasAnyPolicy() const
Definition: lithium.h:199
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Definition: lithium.cc:392
LUnallocated(ExtendedPolicy policy)
Definition: lithium.h:122
Label * GetAssemblyLabel(int block_id) const
Definition: lithium.cc:278
static const int kMinFixedSlotIndex
Definition: lithium.h:196
int NearestGapPos(int index) const
Definition: lithium.cc:375
int ObjectLengthAt(int index)
Definition: lithium.h:485
Handle< JSFunction > closure() const
Definition: lithium.h:431
int LookupDestination(int block_id) const
Definition: lithium.cc:270
#define LITHIUM_OPERAND_LIST(V)
Definition: lithium.h:38
static LSubKindOperand * Create(int index, Zone *zone)
Definition: lithium.h:321
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
void set_destination(LOperand *operand)
Definition: lithium.h:282
LUnallocated(BasicPolicy policy, int index)
Definition: lithium.h:128
HConstant * LookupConstant(LConstantOperand *operand) const
Definition: lithium.cc:387
bool HasWritableRegisterPolicy() const
Definition: lithium.h:228
LParallelMove(Zone *zone)
Definition: lithium.h:351
void PrintTo(StringStream *stream)
Definition: lithium.cc:55
ExtendedPolicy extended_policy() const
Definition: lithium.h:239
bool HasFixedDoubleRegisterPolicy() const
Definition: lithium.h:224
const ZoneList< LInstruction * > * instructions() const
Definition: lithium.h:644
int pc_offset() const
Definition: lithium.h:439
int deoptimization_index() const
Definition: lithium.h:434
static const int kKindFieldWidth
Definition: lithium.h:82
void AddNewObject(int length, bool is_arguments)
Definition: lithium.h:467
void AddInlinedClosure(Handle< JSFunction > closure)
Definition: lithium.h:659