v8  3.25.30(node0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
hydrogen-bce.cc
Go to the documentation of this file.
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "hydrogen-bce.h"
29 
30 namespace v8 {
31 namespace internal {
32 
33 
34 // We try to "factor up" HBoundsCheck instructions towards the root of the
35 // dominator tree.
36 // For now we handle checks where the index is like "exp + int32value".
37 // If in the dominator tree we check "exp + v1" and later (dominated)
38 // "exp + v2", if v2 <= v1 we can safely remove the second check, and if
39 // v2 > v1 we can use v2 in the 1st check and again remove the second.
40 // To do so we keep a dictionary of all checks where the key is the pair
41 // "exp, length".
42 // The class BoundsCheckKey represents this key.
43 class BoundsCheckKey : public ZoneObject {
44  public:
45  HValue* IndexBase() const { return index_base_; }
46  HValue* Length() const { return length_; }
47 
48  uint32_t Hash() {
49  return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
50  }
51 
52  static BoundsCheckKey* Create(Zone* zone,
53  HBoundsCheck* check,
54  int32_t* offset) {
55  if (!check->index()->representation().IsSmiOrInteger32()) return NULL;
56 
57  HValue* index_base = NULL;
58  HConstant* constant = NULL;
59  bool is_sub = false;
60 
61  if (check->index()->IsAdd()) {
62  HAdd* index = HAdd::cast(check->index());
63  if (index->left()->IsConstant()) {
64  constant = HConstant::cast(index->left());
65  index_base = index->right();
66  } else if (index->right()->IsConstant()) {
67  constant = HConstant::cast(index->right());
68  index_base = index->left();
69  }
70  } else if (check->index()->IsSub()) {
71  HSub* index = HSub::cast(check->index());
72  is_sub = true;
73  if (index->left()->IsConstant()) {
74  constant = HConstant::cast(index->left());
75  index_base = index->right();
76  } else if (index->right()->IsConstant()) {
77  constant = HConstant::cast(index->right());
78  index_base = index->left();
79  }
80  }
81 
82  if (constant != NULL && constant->HasInteger32Value()) {
83  *offset = is_sub ? - constant->Integer32Value()
84  : constant->Integer32Value();
85  } else {
86  *offset = 0;
87  index_base = check->index();
88  }
89 
90  return new(zone) BoundsCheckKey(index_base, check->length());
91  }
92 
93  private:
94  BoundsCheckKey(HValue* index_base, HValue* length)
95  : index_base_(index_base),
96  length_(length) { }
97 
98  HValue* index_base_;
99  HValue* length_;
100 
101  DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);
102 };
103 
104 
105 // Data about each HBoundsCheck that can be eliminated or moved.
106 // It is the "value" in the dictionary indexed by "base-index, length"
107 // (the key is BoundsCheckKey).
108 // We scan the code with a dominator tree traversal.
109 // Traversing the dominator tree we keep a stack (implemented as a singly
110 // linked list) of "data" for each basic block that contains a relevant check
111 // with the same key (the dictionary holds the head of the list).
112 // We also keep all the "data" created for a given basic block in a list, and
113 // use it to "clean up" the dictionary when backtracking in the dominator tree
114 // traversal.
115 // Doing this, each dictionary entry always directly points to the check that
116 // is dominating the code being examined now.
117 // We also track the current "offset" of the index expression and use it to
118 // decide if any check is already "covered" (so it can be removed) or not.
120  public:
121  BoundsCheckKey* Key() const { return key_; }
122  int32_t LowerOffset() const { return lower_offset_; }
123  int32_t UpperOffset() const { return upper_offset_; }
124  HBasicBlock* BasicBlock() const { return basic_block_; }
125  HBoundsCheck* LowerCheck() const { return lower_check_; }
126  HBoundsCheck* UpperCheck() const { return upper_check_; }
127  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
128  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
129 
130  bool OffsetIsCovered(int32_t offset) const {
131  return offset >= LowerOffset() && offset <= UpperOffset();
132  }
133 
134  bool HasSingleCheck() { return lower_check_ == upper_check_; }
135 
136  void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset) {
138  while (data != NULL && data->UpperCheck() == check) {
139  ASSERT(data->upper_offset_ < offset);
140  data->upper_offset_ = offset;
141  data = data->FatherInDominatorTree();
142  }
143  }
144 
145  void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset) {
147  while (data != NULL && data->LowerCheck() == check) {
148  ASSERT(data->lower_offset_ > offset);
149  data->lower_offset_ = offset;
150  data = data->FatherInDominatorTree();
151  }
152  }
153 
154  // The goal of this method is to modify either upper_offset_ or
155  // lower_offset_ so that also new_offset is covered (the covered
156  // range grows).
157  //
158  // The precondition is that new_check follows UpperCheck() and
159  // LowerCheck() in the same basic block, and that new_offset is not
160  // covered (otherwise we could simply remove new_check).
161  //
162  // If HasSingleCheck() is true then new_check is added as "second check"
163  // (either upper or lower; note that HasSingleCheck() becomes false).
164  // Otherwise one of the current checks is modified so that it also covers
165  // new_offset, and new_check is removed.
166  void CoverCheck(HBoundsCheck* new_check,
167  int32_t new_offset) {
168  ASSERT(new_check->index()->representation().IsSmiOrInteger32());
169  bool keep_new_check = false;
170 
171  if (new_offset > upper_offset_) {
172  upper_offset_ = new_offset;
173  if (HasSingleCheck()) {
174  keep_new_check = true;
175  upper_check_ = new_check;
176  } else {
177  TightenCheck(upper_check_, new_check, new_offset);
178  UpdateUpperOffsets(upper_check_, upper_offset_);
179  }
180  } else if (new_offset < lower_offset_) {
181  lower_offset_ = new_offset;
182  if (HasSingleCheck()) {
183  keep_new_check = true;
184  lower_check_ = new_check;
185  } else {
186  TightenCheck(lower_check_, new_check, new_offset);
187  UpdateLowerOffsets(lower_check_, lower_offset_);
188  }
189  } else {
190  // Should never have called CoverCheck() in this case.
191  UNREACHABLE();
192  }
193 
194  if (!keep_new_check) {
195  if (FLAG_trace_bce) {
196  OS::Print("Eliminating check #%d after tightening\n",
197  new_check->id());
198  }
199  new_check->block()->graph()->isolate()->counters()->
200  bounds_checks_eliminated()->Increment();
201  new_check->DeleteAndReplaceWith(new_check->ActualValue());
202  } else {
203  HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
204  : lower_check_;
205  if (FLAG_trace_bce) {
206  OS::Print("Moving second check #%d after first check #%d\n",
207  new_check->id(), first_check->id());
208  }
209  // The length is guaranteed to be live at first_check.
210  ASSERT(new_check->length() == first_check->length());
211  HInstruction* old_position = new_check->next();
212  new_check->Unlink();
213  new_check->InsertAfter(first_check);
214  MoveIndexIfNecessary(new_check->index(), new_check, old_position);
215  }
216  }
217 
219  int32_t lower_offset,
220  int32_t upper_offset,
221  HBasicBlock* bb,
222  HBoundsCheck* lower_check,
223  HBoundsCheck* upper_check,
224  BoundsCheckBbData* next_in_bb,
225  BoundsCheckBbData* father_in_dt)
226  : key_(key),
227  lower_offset_(lower_offset),
228  upper_offset_(upper_offset),
229  basic_block_(bb),
230  lower_check_(lower_check),
231  upper_check_(upper_check),
232  next_in_bb_(next_in_bb),
233  father_in_dt_(father_in_dt) { }
234 
235  private:
236  BoundsCheckKey* key_;
237  int32_t lower_offset_;
238  int32_t upper_offset_;
239  HBasicBlock* basic_block_;
240  HBoundsCheck* lower_check_;
241  HBoundsCheck* upper_check_;
242  BoundsCheckBbData* next_in_bb_;
243  BoundsCheckBbData* father_in_dt_;
244 
245  void MoveIndexIfNecessary(HValue* index_raw,
246  HBoundsCheck* insert_before,
247  HInstruction* end_of_scan_range) {
248  if (!index_raw->IsAdd() && !index_raw->IsSub()) {
249  // index_raw can be HAdd(index_base, offset), HSub(index_base, offset),
250  // or index_base directly. In the latter case, no need to move anything.
251  return;
252  }
253  HArithmeticBinaryOperation* index =
255  HValue* left_input = index->left();
256  HValue* right_input = index->right();
257  bool must_move_index = false;
258  bool must_move_left_input = false;
259  bool must_move_right_input = false;
260  for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
261  if (cursor == left_input) must_move_left_input = true;
262  if (cursor == right_input) must_move_right_input = true;
263  if (cursor == index) must_move_index = true;
264  if (cursor->previous() == NULL) {
265  cursor = cursor->block()->dominator()->end();
266  } else {
267  cursor = cursor->previous();
268  }
269  }
270  if (must_move_index) {
271  index->Unlink();
272  index->InsertBefore(insert_before);
273  }
274  // The BCE algorithm only selects mergeable bounds checks that share
275  // the same "index_base", so we'll only ever have to move constants.
276  if (must_move_left_input) {
277  HConstant::cast(left_input)->Unlink();
278  HConstant::cast(left_input)->InsertBefore(index);
279  }
280  if (must_move_right_input) {
281  HConstant::cast(right_input)->Unlink();
282  HConstant::cast(right_input)->InsertBefore(index);
283  }
284  }
285 
286  void TightenCheck(HBoundsCheck* original_check,
287  HBoundsCheck* tighter_check,
288  int32_t new_offset) {
289  ASSERT(original_check->length() == tighter_check->length());
290  MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
291  original_check->ReplaceAllUsesWith(original_check->index());
292  original_check->SetOperandAt(0, tighter_check->index());
293  if (FLAG_trace_bce) {
294  OS::Print("Tightened check #%d with offset %d from #%d\n",
295  original_check->id(), new_offset, tighter_check->id());
296  }
297  }
298 
299  DISALLOW_COPY_AND_ASSIGN(BoundsCheckBbData);
300 };
301 
302 
303 static bool BoundsCheckKeyMatch(void* key1, void* key2) {
304  BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
305  BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
306  return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
307 }
308 
309 
311  : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
312  ZoneAllocationPolicy(zone)) { }
313 
314 
315 BoundsCheckBbData** BoundsCheckTable::LookupOrInsert(BoundsCheckKey* key,
316  Zone* zone) {
317  return reinterpret_cast<BoundsCheckBbData**>(
318  &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
319 }
320 
321 
322 void BoundsCheckTable::Insert(BoundsCheckKey* key,
323  BoundsCheckBbData* data,
324  Zone* zone) {
325  Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
326 }
327 
328 
329 void BoundsCheckTable::Delete(BoundsCheckKey* key) {
330  Remove(key, key->Hash());
331 }
332 
333 
335  public:
336  HBasicBlock* block_;
338  int index_;
339 };
340 
341 
342 // Eliminates checks in bb and recursively in the dominated blocks.
343 // Also replace the results of check instructions with the original value, if
344 // the result is used. This is safe now, since we don't do code motion after
345 // this point. It enables better register allocation since the value produced
346 // by check instructions is really a copy of the original value.
347 void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
348  HBasicBlock* entry) {
349  // Allocate the stack.
351  zone()->NewArray<HBoundsCheckEliminationState>(graph()->blocks()->length());
352 
353  // Explicitly push the entry block.
354  stack[0].block_ = entry;
355  stack[0].bb_data_list_ = PreProcessBlock(entry);
356  stack[0].index_ = 0;
357  int stack_depth = 1;
358 
359  // Implement depth-first traversal with a stack.
360  while (stack_depth > 0) {
361  int current = stack_depth - 1;
362  HBoundsCheckEliminationState* state = &stack[current];
363  const ZoneList<HBasicBlock*>* children = state->block_->dominated_blocks();
364 
365  if (state->index_ < children->length()) {
366  // Recursively visit children blocks.
367  HBasicBlock* child = children->at(state->index_++);
368  int next = stack_depth++;
369  stack[next].block_ = child;
370  stack[next].bb_data_list_ = PreProcessBlock(child);
371  stack[next].index_ = 0;
372  } else {
373  // Finished with all children; post process the block.
374  PostProcessBlock(state->block_, state->bb_data_list_);
375  stack_depth--;
376  }
377  }
378 }
379 
380 
381 BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
382  HBasicBlock* bb) {
383  BoundsCheckBbData* bb_data_list = NULL;
384 
385  for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
386  HInstruction* i = it.Current();
387  if (!i->IsBoundsCheck()) continue;
388 
389  HBoundsCheck* check = HBoundsCheck::cast(i);
390  int32_t offset;
391  BoundsCheckKey* key =
392  BoundsCheckKey::Create(zone(), check, &offset);
393  if (key == NULL) continue;
394  BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
395  BoundsCheckBbData* data = *data_p;
396  if (data == NULL) {
397  bb_data_list = new(zone()) BoundsCheckBbData(key,
398  offset,
399  offset,
400  bb,
401  check,
402  check,
403  bb_data_list,
404  NULL);
405  *data_p = bb_data_list;
406  if (FLAG_trace_bce) {
407  OS::Print("Fresh bounds check data for block #%d: [%d]\n",
408  bb->block_id(), offset);
409  }
410  } else if (data->OffsetIsCovered(offset)) {
411  bb->graph()->isolate()->counters()->
412  bounds_checks_eliminated()->Increment();
413  if (FLAG_trace_bce) {
414  OS::Print("Eliminating bounds check #%d, offset %d is covered\n",
415  check->id(), offset);
416  }
417  check->DeleteAndReplaceWith(check->ActualValue());
418  } else if (data->BasicBlock() == bb) {
419  // TODO(jkummerow): I think the following logic would be preferable:
420  // if (data->Basicblock() == bb ||
421  // graph()->use_optimistic_licm() ||
422  // bb->IsLoopSuccessorDominator()) {
423  // data->CoverCheck(check, offset)
424  // } else {
425  // /* add pristine BCBbData like in (data == NULL) case above */
426  // }
427  // Even better would be: distinguish between read-only dominator-imposed
428  // knowledge and modifiable upper/lower checks.
429  // What happens currently is that the first bounds check in a dominated
430  // block will stay around while any further checks are hoisted out,
431  // which doesn't make sense. Investigate/fix this in a future CL.
432  data->CoverCheck(check, offset);
433  } else if (graph()->use_optimistic_licm() ||
434  bb->IsLoopSuccessorDominator()) {
435  int32_t new_lower_offset = offset < data->LowerOffset()
436  ? offset
437  : data->LowerOffset();
438  int32_t new_upper_offset = offset > data->UpperOffset()
439  ? offset
440  : data->UpperOffset();
441  bb_data_list = new(zone()) BoundsCheckBbData(key,
442  new_lower_offset,
443  new_upper_offset,
444  bb,
445  data->LowerCheck(),
446  data->UpperCheck(),
447  bb_data_list,
448  data);
449  if (FLAG_trace_bce) {
450  OS::Print("Updated bounds check data for block #%d: [%d - %d]\n",
451  bb->block_id(), new_lower_offset, new_upper_offset);
452  }
453  table_.Insert(key, bb_data_list, zone());
454  }
455  }
456 
457  return bb_data_list;
458 }
459 
460 
461 void HBoundsCheckEliminationPhase::PostProcessBlock(
462  HBasicBlock* block, BoundsCheckBbData* data) {
463  while (data != NULL) {
464  if (data->FatherInDominatorTree()) {
465  table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
466  } else {
467  table_.Delete(data->Key());
468  }
469  data = data->NextInBasicBlock();
470  }
471 }
472 
473 } } // namespace v8::internal
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
T & at(int i) const
Definition: list.h:90
int int32_t
Definition: unicode.cc:47
BoundsCheckBbData(BoundsCheckKey *key, int32_t lower_offset, int32_t upper_offset, HBasicBlock *bb, HBoundsCheck *lower_check, HBoundsCheck *upper_check, BoundsCheckBbData *next_in_bb, BoundsCheckBbData *father_in_dt)
#define ASSERT(condition)
Definition: checks.h:329
BoundsCheckBbData * NextInBasicBlock() const
HGraph * graph() const
Definition: hydrogen.h:2695
void UpdateLowerOffsets(HBoundsCheck *check, int32_t offset)
BoundsCheckKey * Key() const
#define UNREACHABLE()
Definition: checks.h:52
void check(i::Vector< const uint8_t > string)
Entry * Lookup(void *key, uint32_t hash, bool insert, ZoneAllocationPolicyallocator=ZoneAllocationPolicy())
HInstruction * next() const
BoundsCheckBbData * FatherInDominatorTree() const
virtual intptr_t Hashcode()
static void Print(const char *format,...)
bool OffsetIsCovered(int32_t offset) const
static BoundsCheckKey * Create(Zone *zone, HBoundsCheck *check, int32_t *offset)
Definition: hydrogen-bce.cc:52
HBasicBlock * BasicBlock() const
HValue * IndexBase() const
Definition: hydrogen-bce.cc:45
void CoverCheck(HBoundsCheck *new_check, int32_t new_offset)
HValue * Length() const
Definition: hydrogen-bce.cc:46
static HValue * cast(HValue *value)
HBoundsCheck * UpperCheck() const
HBoundsCheck * LowerCheck() const
void UpdateUpperOffsets(HBoundsCheck *check, int32_t offset)