V8 3.25.30 (as shipped with Node.js 0.11.13)
V8 is Google's open source JavaScript engine.
hydrogen-gvn.cc — Doxygen-generated source listing (generator navigation text omitted).
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "hydrogen.h"
29 #include "hydrogen-gvn.h"
30 #include "v8.h"
31 
32 namespace v8 {
33 namespace internal {
34 
// A hash map from instruction hashcode to HInstruction*, bucketed with
// separate chaining over zone-allocated arrays.  GVN uses it to find an
// earlier, equivalent instruction that a later one can be replaced with.
// The map also keeps the union of the "depends on" side effects of all
// stored instructions (present_depends_on_) so Kill() can return early
// when a change set cannot affect anything stored here.
class HInstructionMap V8_FINAL : public ZoneObject {
 public:
  HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil),
        side_effects_tracker_(side_effects_tracker) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every stored instruction whose "depends on" set intersects
  // side_effects.
  void Kill(SideEffects side_effects);

  void Add(HInstruction* instr, Zone* zone) {
    present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
    Insert(instr, zone);
  }

  // Returns a stored instruction that Equals() instr, or NULL.
  HInstruction* Lookup(HInstruction* instr) const;

  HInstructionMap* Copy(Zone* zone) const {
    return new(zone) HInstructionMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HInstruction* values. Stored in arrays.
  struct HInstructionMapListElement {
    HInstruction* instr;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  // Copy constructor used by Copy().
  HInstructionMap(Zone* zone, const HInstructionMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HInstruction* instr, Zone* zone);
  // Maps a hash to a bucket index; relies on array_size_ being a power of 2.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HInstructionMap.
  SideEffects present_depends_on_;
  // Primary store - contains the first value with a given hash.
  // Colliding elements are stored in linked lists.
  HInstructionMapListElement* array_;
  // The linked lists containing hash collisions.
  HInstructionMapListElement* lists_;
  int free_list_head_;  // Unused elements in lists_ are on the free list.
  SideEffectsTracker* side_effects_tracker_;
};
94 
95 
96 class HSideEffectMap V8_FINAL BASE_EMBEDDED {
97  public:
98  HSideEffectMap();
99  explicit HSideEffectMap(HSideEffectMap* other);
100  HSideEffectMap& operator= (const HSideEffectMap& other);
101 
102  void Kill(SideEffects side_effects);
103 
104  void Store(SideEffects side_effects, HInstruction* instr);
105 
106  bool IsEmpty() const { return count_ == 0; }
107 
108  inline HInstruction* operator[](int i) const {
109  ASSERT(0 <= i);
111  return data_[i];
112  }
113  inline HInstruction* at(int i) const { return operator[](i); }
114 
115  private:
116  int count_;
118 };
119 
120 
// printf-style tracing helper for --trace-gvn output; forwards its varargs
// to OS::VPrint.  Call through the TRACE_GVN_* macros so the argument
// expressions are only evaluated when tracing is enabled.
void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  OS::VPrint(msg, arguments);
  va_end(arguments);
}
127 
128 
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
// NOTE: these expand to a bare `if` with a brace block rather than the usual
// do { ... } while (false) idiom.  An `else` immediately following an
// invocation would therefore bind to the macro's `if`.  Some call sites in
// this file invoke the macros without a trailing semicolon, which only the
// brace-block form permits — do not "fix" this to do/while.
#define TRACE_GVN_1(msg, a1) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1); \
  }

#define TRACE_GVN_2(msg, a1, a2) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2); \
  }

#define TRACE_GVN_3(msg, a1, a2, a3) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3); \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4); \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4, a5); \
  }
155 
156 
// Copy constructor used by Copy(): clones the other map's bucket array and
// collision lists into freshly zone-allocated arrays of the same sizes.
HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_depends_on_(other->present_depends_on_),
      array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_),
      side_effects_tracker_(other->side_effects_tracker_) {
  // Elements are POD (pointer + int), so a raw memory copy is safe.
  OS::MemCopy(
      array_, other->array_, array_size_ * sizeof(HInstructionMapListElement));
  OS::MemCopy(
      lists_, other->lists_, lists_size_ * sizeof(HInstructionMapListElement));
}
171 
172 
// Removes every instruction whose "depends on" side effects intersect
// |changes| and recomputes present_depends_on_ from the survivors.
// Removed collision-list nodes are returned to the free list.
void HInstructionMap::Kill(SideEffects changes) {
  // Fast path: nothing stored here can be affected by |changes|.
  if (!present_depends_on_.ContainsAnyOf(changes)) return;
  present_depends_on_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HInstruction* instr = array_[i].instr;
    if (instr != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        // (Intentionally shadows the outer |instr|.)
        HInstruction* instr = lists_[current].instr;
        SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
        if (depends_on.ContainsAnyOf(changes)) {
          // Drop it: push the node onto the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: prepend to the rebuilt chain.
          lists_[current].next = kept;
          kept = current;
          present_depends_on_.Add(depends_on);
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      instr = array_[i].instr;
      SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
      if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].instr = NULL;
        } else {
          // Promote the first collision-list element into the bucket slot.
          array_[i].instr = lists_[head].instr;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_depends_on_.Add(depends_on);  // Keep it.
      }
    }
  }
}
220 
221 
222 HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
223  uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
224  uint32_t pos = Bound(hash);
225  if (array_[pos].instr != NULL) {
226  if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
227  int next = array_[pos].next;
228  while (next != kNil) {
229  if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
230  next = lists_[next].next;
231  }
232  }
233  return NULL;
234 }
235 
236 
// Grows the bucket array to |new_size| and rehashes every stored
// instruction.  Only the bucket array is reallocated; the collision-list
// storage (lists_) is reused in place.
void HInstructionMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HInstructionMapListElement* new_array =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;  // Insert() below re-increments the count per element.
  // Do not modify present_depends_on_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].instr != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          Insert(lists_[current].instr, zone);
          int next = lists_[current].next;
          // Return the old list node to the free list before moving on.
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored instruction.
        Insert(old_array[i].instr, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);  // Rehashing must not lose elements.
}
280 
281 
// Grows the collision-list storage to |new_size| elements.  Existing nodes
// keep their indices (old contents are copied verbatim) and every newly
// added slot is pushed onto the free list.
void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
  ASSERT(new_size > lists_size_);

  HInstructionMapListElement* new_lists =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    OS::MemCopy(
        lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
  }
  // Thread each new slot onto the free list.
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}
304 
305 
// Inserts |instr| into the bucket chosen by its hashcode.  When the bucket
// is occupied, the instruction is prepended to the bucket's collision
// chain using a node popped off the free list (growing lists_ if needed).
void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
  ASSERT(instr != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
  if (array_[pos].instr == NULL) {
    array_[pos].instr = instr;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    // Pop a node off the free list and link it in as the new chain head.
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].instr = instr;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
    array_[pos].next = new_element_pos;
  }
}
329 
330 
331 HSideEffectMap::HSideEffectMap() : count_(0) {
332  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
333 }
334 
335 
// Copy construction delegates the data_ copy to the assignment operator.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
339 
340 
341 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
342  if (this != &other) {
343  OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
344  }
345  return *this;
346 }
347 
348 
349 void HSideEffectMap::Kill(SideEffects side_effects) {
350  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
351  if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
352  if (data_[i] != NULL) count_--;
353  data_[i] = NULL;
354  }
355  }
356 }
357 
358 
359 void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
360  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
361  if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
362  if (data_[i] == NULL) count_++;
363  data_[i] = instr;
364  }
365  }
366 }
367 
368 
// Computes the side effects produced by |instr|.  The coarse kGlobalVars /
// kInobjectFields flags are refined, when possible, into per-variable or
// per-field "special" bits so unrelated loads are not invalidated.
SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
  int index;
  SideEffects result(instr->ChangesFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsStoreGlobalCell() &&
        ComputeGlobalVar(HStoreGlobalCell::cast(instr)->cell(), &index)) {
      // Precise: this store only touches one tracked global cell.
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      // Imprecise: conservatively mark every tracked global as changed.
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsStoreNamedField() &&
        ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
      // Precise: this store only touches one tracked in-object field.
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      // Imprecise: conservatively mark every tracked field as changed.
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}
396 
397 
// Computes the side effects |instr| depends on.  Mirrors ComputeChanges():
// the coarse kGlobalVars / kInobjectFields flags are refined into per-cell
// or per-field "special" bits for precise loads.
SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
  int index;
  SideEffects result(instr->DependsOnFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsLoadGlobalCell() &&
        ComputeGlobalVar(HLoadGlobalCell::cast(instr)->cell(), &index)) {
      // Precise: this load only reads one tracked global cell.
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      // Imprecise: conservatively depend on every tracked global.
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsLoadNamedField() &&
        ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
      // Precise: this load only reads one tracked in-object field.
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      // Imprecise: conservatively depend on every tracked field.
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}
425 
426 
427 void SideEffectsTracker::PrintSideEffectsTo(StringStream* stream,
428  SideEffects side_effects) const {
429  const char* separator = "";
430  stream->Add("[");
431  for (int bit = 0; bit < kNumberOfFlags; ++bit) {
432  GVNFlag flag = GVNFlagFromInt(bit);
433  if (side_effects.ContainsFlag(flag)) {
434  stream->Add(separator);
435  separator = ", ";
436  switch (flag) {
437 #define DECLARE_FLAG(Type) \
438  case k##Type: \
439  stream->Add(#Type); \
440  break;
443 #undef DECLARE_FLAG
444  default:
445  break;
446  }
447  }
448  }
449  for (int index = 0; index < num_global_vars_; ++index) {
450  if (side_effects.ContainsSpecial(GlobalVar(index))) {
451  stream->Add(separator);
452  separator = ", ";
453  stream->Add("[%p]", *global_vars_[index].handle());
454  }
455  }
456  for (int index = 0; index < num_inobject_fields_; ++index) {
457  if (side_effects.ContainsSpecial(InobjectField(index))) {
458  stream->Add(separator);
459  separator = ", ";
460  inobject_fields_[index].PrintTo(stream);
461  }
462  }
463  stream->Add("]");
464 }
465 
466 
467 bool SideEffectsTracker::ComputeGlobalVar(Unique<Cell> cell, int* index) {
468  for (int i = 0; i < num_global_vars_; ++i) {
469  if (cell == global_vars_[i]) {
470  *index = i;
471  return true;
472  }
473  }
474  if (num_global_vars_ < kNumberOfGlobalVars) {
475  if (FLAG_trace_gvn) {
476  HeapStringAllocator allocator;
477  StringStream stream(&allocator);
478  stream.Add("Tracking global var [%p] (mapped to index %d)\n",
479  *cell.handle(), num_global_vars_);
480  stream.OutputToStdOut();
481  }
482  *index = num_global_vars_;
483  global_vars_[num_global_vars_++] = cell;
484  return true;
485  }
486  return false;
487 }
488 
489 
490 bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
491  int* index) {
492  for (int i = 0; i < num_inobject_fields_; ++i) {
493  if (access.Equals(inobject_fields_[i])) {
494  *index = i;
495  return true;
496  }
497  }
498  if (num_inobject_fields_ < kNumberOfInobjectFields) {
499  if (FLAG_trace_gvn) {
500  HeapStringAllocator allocator;
501  StringStream stream(&allocator);
502  stream.Add("Tracking inobject field access ");
503  access.PrintTo(&stream);
504  stream.Add(" (mapped to index %d)\n", num_inobject_fields_);
505  stream.OutputToStdOut();
506  }
507  *index = num_inobject_fields_;
508  inobject_fields_[num_inobject_fields_++] = access;
509  return true;
510  }
511  return false;
512 }
513 
514 
// Allocates the per-block and per-loop side-effect tables, one entry per
// basic block, all initialized to the empty SideEffects set.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  // The phase must not allocate handles; the assert enforces that contract
  // (presumably so GVN can run while handle allocation is forbidden —
  // confirm against the phase's callers).
  ASSERT(!AllowHandleAllocation::IsAllowed());
  block_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
  loop_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
}
527 
528 
// Runs GVN to a fix point, bounded by --gvn-iterations.  Each pass computes
// block/loop side effects, optionally performs loop-invariant code motion,
// then value-numbers the graph.  A further pass is only worthwhile when the
// previous one removed side effects, which may expose new redundancies.
void HGlobalValueNumberingPhase::Run() {
  ASSERT(!removed_side_effects_);
  for (int i = FLAG_gvn_iterations; i > 0; --i) {
    // Compute the side effects.
    ComputeBlockSideEffects();

    // Perform loop invariant code motion if requested.
    if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();

    // Perform the actual value numbering.
    AnalyzeGraph();

    // Continue GVN if we removed any side effects.
    if (!removed_side_effects_) break;
    removed_side_effects_ = false;

    // Clear all side effects so the next pass recomputes them from scratch.
    ASSERT_EQ(block_side_effects_.length(), graph()->blocks()->length());
    ASSERT_EQ(loop_side_effects_.length(), graph()->blocks()->length());
    for (int i = 0; i < graph()->blocks()->length(); ++i) {
      block_side_effects_[i].RemoveAll();
      loop_side_effects_[i].RemoveAll();
    }
    visited_on_paths_.Clear();
  }
}
555 
556 
// Fills block_side_effects_ (per-block union of instruction changes) and
// loop_side_effects_ (union over every block of a loop, including nested
// loops, accumulated into each enclosing loop header's entry).
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    SideEffects side_effects;
    // Unreachable or always-deoptimizing blocks contribute nothing.
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
      if (block->HasParentLoopHeader()) {
        HBasicBlock* with_parent = block;
        // A loop header propagates its whole loop's accumulated effects.
        if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
        do {
          HBasicBlock* parent_block = with_parent->parent_loop_header();
          loop_side_effects_[parent_block->block_id()].Add(side_effects);
          with_parent = parent_block;
        } while (with_parent->HasParentLoopHeader());
      }
    }
  }
}
588 
589 
// Attempts to hoist loop-invariant instructions out of each loop: for every
// loop header, processes all blocks from the header to the loop's last back
// edge against the loop's accumulated side effects.
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      SideEffects side_effects = loop_side_effects_[block->block_id()];
      if (FLAG_trace_gvn) {
        HeapStringAllocator allocator;
        StringStream stream(&allocator);
        stream.Add("Try loop invariant motion for block B%d changes ",
                   block->block_id());
        side_effects_tracker_.PrintSideEffectsTo(&stream, side_effects);
        stream.Add("\n");
        stream.OutputToStdOut();
      }
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Blocks [header, last back edge] span the loop body in block order.
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
      }
    }
  }
}
613 
614 
// Examines every GVN-able instruction in |block| and hoists it to the loop
// pre-header when (a) nothing the loop changes (|loop_kills|) intersects
// what the instruction depends on, (b) all operands are defined before the
// pre-header, and (c) ShouldMove() permits code motion here.
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    SideEffects loop_kills) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  if (FLAG_trace_gvn) {
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    stream.Add("Loop invariant code motion for B%d depends on ",
               block->block_id());
    side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
    stream.Add("\n");
    stream.OutputToStdOut();
  }
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor first: instr may be unlinked below.
    HInstruction* next = instr->next();
    if (instr->CheckFlag(HValue::kUseGVN)) {
      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
      if (FLAG_trace_gvn) {
        HeapStringAllocator allocator;
        StringStream stream(&allocator);
        stream.Add("Checking instruction i%d (%s) changes ",
                   instr->id(), instr->Mnemonic());
        side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
        stream.Add(", depends on ");
        side_effects_tracker_.PrintSideEffectsTo(&stream, depends_on);
        stream.Add(". Loop changes ");
        side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
        stream.Add("\n");
        stream.OutputToStdOut();
      }
      bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        // Without optimistic LICM, only hoist from blocks that dominate all
        // loop exits.
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
                      instr->id(), pre_header->block_id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
        }
      }
    }
    instr = next;
  }
}
674 
675 
676 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
677  return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
678 }
679 
680 
681 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
682  HBasicBlock* loop_header) {
683  // If we've disabled code motion or we're in a block that unconditionally
684  // deoptimizes, don't move any instructions.
685  return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
686  instr->block()->IsReachable();
687 }
688 
689 
// Collects the union of side effects occurring on control-flow paths from
// |dominator| to |dominated| (excluding both endpoints) by walking
// predecessor chains.  The caller clears visited_on_paths_ before the
// outermost call; it prevents revisiting shared predecessors.
SideEffects
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  SideEffects side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    // Only blocks strictly between the dominator and the dominated block
    // (by block id) can lie on such a path.
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      // Recurse to pick up effects further up the paths.
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
710 
711 
712 // Each instance of this class is like a "stack frame" for the recursive
713 // traversal of the dominator tree done during GVN (the stack is handled
714 // as a double linked list).
715 // We reuse frames when possible so the list length is limited by the depth
716 // of the dominator tree but this forces us to initialize each frame calling
717 // an explicit "Initialize" method instead of a using constructor.
719  public:
721  HBasicBlock* entry_block,
722  HInstructionMap* entry_map) {
723  return new(zone)
724  GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
725  }
726 
727  HBasicBlock* block() { return block_; }
728  HInstructionMap* map() { return map_; }
729  HSideEffectMap* dominators() { return &dominators_; }
730 
732  Zone* zone,
733  HBasicBlock** dominator) {
734  // This assignment needs to happen before calling next_dominated() because
735  // that call can reuse "this" if we are at the last dominated block.
736  *dominator = block();
737  GvnBasicBlockState* result = next_dominated(zone);
738  if (result == NULL) {
739  GvnBasicBlockState* dominator_state = pop();
740  if (dominator_state != NULL) {
741  // This branch is guaranteed not to return NULL because pop() never
742  // returns a state where "is_done() == true".
743  *dominator = dominator_state->block();
744  result = dominator_state->next_dominated(zone);
745  } else {
746  // Unnecessary (we are returning NULL) but done for cleanness.
747  *dominator = NULL;
748  }
749  }
750  return result;
751  }
752 
753  private:
754  void Initialize(HBasicBlock* block,
755  HInstructionMap* map,
756  HSideEffectMap* dominators,
757  bool copy_map,
758  Zone* zone) {
759  block_ = block;
760  map_ = copy_map ? map->Copy(zone) : map;
761  dominated_index_ = -1;
762  length_ = block->dominated_blocks()->length();
763  if (dominators != NULL) {
764  dominators_ = *dominators;
765  }
766  }
767  bool is_done() { return dominated_index_ >= length_; }
768 
769  GvnBasicBlockState(GvnBasicBlockState* previous,
770  HBasicBlock* block,
771  HInstructionMap* map,
772  HSideEffectMap* dominators,
773  Zone* zone)
774  : previous_(previous), next_(NULL) {
775  Initialize(block, map, dominators, true, zone);
776  }
777 
778  GvnBasicBlockState* next_dominated(Zone* zone) {
779  dominated_index_++;
780  if (dominated_index_ == length_ - 1) {
781  // No need to copy the map for the last child in the dominator tree.
782  Initialize(block_->dominated_blocks()->at(dominated_index_),
783  map(),
784  dominators(),
785  false,
786  zone);
787  return this;
788  } else if (dominated_index_ < length_) {
789  return push(zone, block_->dominated_blocks()->at(dominated_index_));
790  } else {
791  return NULL;
792  }
793  }
794 
795  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
796  if (next_ == NULL) {
797  next_ =
798  new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
799  } else {
800  next_->Initialize(block, map(), dominators(), true, zone);
801  }
802  return next_;
803  }
804  GvnBasicBlockState* pop() {
805  GvnBasicBlockState* result = previous_;
806  while (result != NULL && result->is_done()) {
807  TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
808  block()->block_id(),
809  previous_->block()->block_id())
810  result = result->previous_;
811  }
812  return result;
813  }
814 
815  GvnBasicBlockState* previous_;
816  GvnBasicBlockState* next_;
817  HBasicBlock* block_;
818  HInstructionMap* map_;
819  HSideEffectMap dominators_;
820  int dominated_index_;
821  int length_;
822 };
823 
824 
// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HInstructionMap* entry_map =
      new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HInstructionMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Notify the instruction of the closest dominating instruction for
        // each tracked side effect it depends on.
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag flag = GVNFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            if (instr->HandleSideEffectDominator(flag, other)) {
              removed_side_effects_ = true;
            }
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      if (!changes.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(changes);
        dominators->Store(changes, instr);
        if (FLAG_trace_gvn) {
          HeapStringAllocator allocator;
          StringStream stream(&allocator);
          stream.Add("Instruction i%d changes ", instr->id());
          side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
          stream.Add("\n");
          stream.OutputToStdOut();
        }
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        HInstruction* other = map->Lookup(instr);
        if (other != NULL) {
          // An equivalent dominating instruction exists: reuse its value.
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          // First occurrence: remember it for later lookups.
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HInstructionMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list is already empty. If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        SideEffects side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
935 
936 } } // namespace v8::internal
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
HInstructionMap(Zone *zone, SideEffectsTracker *side_effects_tracker)
Definition: hydrogen-gvn.cc:37
#define TRACE_GVN_2(msg, a1, a2)
#define ASSERT(condition)
Definition: checks.h:329
kInstanceClassNameOffset flag
Definition: objects-inl.h:5115
HInstruction * operator[](int i) const
bool IsEmpty() const
Definition: hydrogen-gvn.cc:62
const int kPointerSize
Definition: globals.h:268
T * NewArray(size_t size)
Definition: allocation.h:83
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
HInstruction * at(int i) const
#define TRACE_GVN_4(msg, a1, a2, a3, a4)
HInstructionMap * Copy(Zone *zone) const
Definition: hydrogen-gvn.cc:58
#define GVN_TRACKED_FLAG_LIST(V)
#define TRACE_GVN_1(msg, a1)
GvnBasicBlockState * next_in_dominator_tree_traversal(Zone *zone, HBasicBlock **dominator)
static void VPrint(const char *format, va_list args)
static GvnBasicBlockState * CreateEntry(Zone *zone, HBasicBlock *entry_block, HInstructionMap *entry_map)
void Add(HInstruction *instr, Zone *zone)
Definition: hydrogen-gvn.cc:51
#define BASE_EMBEDDED
Definition: allocation.h:68
#define GVN_UNTRACKED_FLAG_LIST(V)
#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)
#define DECLARE_FLAG(Type)
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:103
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
#define ASSERT_EQ(v1, v2)
Definition: checks.h:330
void USE(T)
Definition: globals.h:341
void TraceGVN(const char *msg,...)