// NOTE(review): lossy extraction of BoundsCheckKey::Create. The leading
// integers ("55", "58", ...) look like leaked source-file line numbers, and
// several statements appear to be missing (e.g. the declarations of
// index_base / is_sub, an "*offset = 0" branch, and some braces). The code
// is annotated as-is, not repaired.
//
// Purpose (from the visible logic): decompose a bounds check's index of the
// form "base + constant" or "base - constant" into a base value plus an
// int32 offset, so checks on the same base can share one table key.
// Only Smi/Integer32-represented indices are analyzable; bail out otherwise.
55 if (!check->index()->representation().IsSmiOrInteger32())
return NULL;
// Constant part of the index, if one can be found below.
58 HConstant* constant =
NULL;
// Case 1: index is an addition — the constant may be on either side.
61 if (check->index()->IsAdd()) {
62 HAdd* index = HAdd::cast(check->index());
63 if (index->left()->IsConstant()) {
64 constant = HConstant::cast(index->left());
65 index_base = index->right();
66 }
else if (index->right()->IsConstant()) {
67 constant = HConstant::cast(index->right());
68 index_base = index->left();
70 }
// Case 2: index is a subtraction — same decomposition; the offset sign is
// flipped below via is_sub (its assignment is one of the missing lines —
// TODO confirm against the original file).
else if (check->index()->IsSub()) {
71 HSub* index = HSub::cast(check->index());
73 if (index->left()->IsConstant()) {
74 constant = HConstant::cast(index->left());
75 index_base = index->right();
76 }
else if (index->right()->IsConstant()) {
77 constant = HConstant::cast(index->right());
78 index_base = index->left();
// Report the constant as the offset (negated for subtraction) when it has
// an int32 value; otherwise fall through and key on the whole index.
82 if (constant !=
NULL && constant->HasInteger32Value()) {
83 *offset = is_sub ? - constant->Integer32Value()
84 : constant->Integer32Value();
// No usable constant part: the entire index expression becomes the base.
87 index_base = check->index();
// NOTE(review): fragment of the BoundsCheckKey constructor's initializer
// list (only the first initializer is visible) plus the class's
// copy-suppression macro. The surrounding class body is missing from this
// extraction.
95 : index_base_(index_base),
// Keys live in the zone and are shared via the hash table; copying them
// would break identity-based matching.
101 DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);
// NOTE(review): interior fragments of UpdateUpperOffsets and
// UpdateLowerOffsets (per the declaration outline later in this dump);
// the enclosing loops/signatures are missing.
// Widen the recorded upper bound — asserted to be a strict improvement.
139 ASSERT(data->upper_offset_ < offset);
140 data->upper_offset_ = offset;
// Widen the recorded lower bound — likewise strictly smaller.
148 ASSERT(data->lower_offset_ > offset);
149 data->lower_offset_ = offset;
// NOTE(review): lossy fragment of BoundsCheckBbData::CoverCheck.
// Purpose (from visible logic): given a new bounds check with offset
// new_offset on the same key, either (a) extend the covered
// [lower_offset_, upper_offset_] range and keep/tighten the boundary
// check, or (b) eliminate the new check because it is already covered.
168 ASSERT(new_check->index()->representation().IsSmiOrInteger32());
169 bool keep_new_check =
false;
// New check extends the range upward: it becomes the upper boundary check.
171 if (new_offset > upper_offset_) {
172 upper_offset_ = new_offset;
// A branch (not visible here — TODO confirm) decides between keeping the
// new check and tightening the existing upper check in place.
174 keep_new_check =
true;
175 upper_check_ = new_check;
177 TightenCheck(upper_check_, new_check, new_offset);
180 }
// Symmetric case: new check extends the range downward.
else if (new_offset < lower_offset_) {
181 lower_offset_ = new_offset;
183 keep_new_check =
true;
184 lower_check_ = new_check;
186 TightenCheck(lower_check_, new_check, new_offset);
// The new check's range was absorbed by an existing boundary check:
// count it as eliminated and replace it with its input value.
194 if (!keep_new_check) {
195 if (FLAG_trace_bce) {
196 OS::Print(
"Eliminating check #%d after tightening\n",
199 new_check->block()->graph()->isolate()->counters()->
200 bounds_checks_eliminated()->Increment();
201 new_check->DeleteAndReplaceWith(new_check->ActualValue());
// Kept checks must dominate correctly: hoist the second boundary check to
// just after the first one (lines completing the ?: are missing here).
203 HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
205 if (FLAG_trace_bce) {
206 OS::Print(
"Moving second check #%d after first check #%d\n",
207 new_check->id(), first_check->id());
// Both checks guard the same length, so reordering them is sound.
210 ASSERT(new_check->length() == first_check->length());
213 new_check->InsertAfter(first_check);
// Ensure the index expression (and its constant inputs) are defined before
// the moved check's new position.
214 MoveIndexIfNecessary(new_check->index(), new_check, old_position);
// NOTE(review): fragment of the BoundsCheckBbData constructor (parameter
// tail + initializer list) and of the class's data members. Parameters for
// key/offsets/basic-block and the member declarations for key_, offsets and
// list links are missing from this extraction.
222 HBoundsCheck* lower_check,
223 HBoundsCheck* upper_check,
227 lower_offset_(lower_offset),
228 upper_offset_(upper_offset),
230 lower_check_(lower_check),
231 upper_check_(upper_check),
// next_in_bb_ chains per-basic-block data; father_in_dt_ links to the data
// of the dominator-tree parent (see PostProcessBlock below).
232 next_in_bb_(next_in_bb),
233 father_in_dt_(father_in_dt) { }
// Visible member declarations (the rest of the field list is missing).
239 HBasicBlock* basic_block_;
240 HBoundsCheck* lower_check_;
241 HBoundsCheck* upper_check_;
// NOTE(review): lossy fragment of MoveIndexIfNecessary. Purpose (from
// visible logic): after a bounds check is hoisted to insert_before, make
// sure its index computation (an HAdd/HSub) and that computation's constant
// inputs are scheduled before it; scan backwards from end_of_scan_range to
// find which of them currently sit in the moved-over region.
245 void MoveIndexIfNecessary(
HValue* index_raw,
246 HBoundsCheck* insert_before,
// A non-Add/Sub index needs no motion (an early return presumably follows
// — the line is missing from this extraction).
248 if (!index_raw->IsAdd() && !index_raw->IsSub()) {
253 HArithmeticBinaryOperation* index =
255 HValue* left_input = index->left();
256 HValue* right_input = index->right();
257 bool must_move_index =
false;
258 bool must_move_left_input =
false;
259 bool must_move_right_input =
false;
// Walk instructions backwards from end_of_scan_range to insert_before,
// flagging anything that would end up defined after its use.
260 for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
261 if (cursor == left_input) must_move_left_input =
true;
262 if (cursor == right_input) must_move_right_input =
true;
263 if (cursor == index) must_move_index =
true;
// At a block's first instruction, continue the walk in the dominator block.
264 if (cursor->previous() ==
NULL) {
265 cursor = cursor->block()->dominator()->end();
267 cursor = cursor->previous();
// Reinsert the index computation right before the hoisted check (an
// Unlink() call presumably precedes this — line missing).
270 if (must_move_index) {
272 index->InsertBefore(insert_before);
// Inputs needing motion are cast to HConstant — only constants can sit in
// the scanned region for a movable index (TODO confirm against original).
276 if (must_move_left_input) {
277 HConstant::cast(left_input)->Unlink();
278 HConstant::cast(left_input)->InsertBefore(index);
280 if (must_move_right_input) {
281 HConstant::cast(right_input)->Unlink();
282 HConstant::cast(right_input)->InsertBefore(index);
// NOTE(review): fragment of TightenCheck. Purpose (from visible logic):
// strengthen original_check in place so it also covers tighter_check's
// offset, by swapping in the tighter index operand; the original check's
// uses are redirected to its (old) index value first.
286 void TightenCheck(HBoundsCheck* original_check,
287 HBoundsCheck* tighter_check,
// Same length on both checks is what makes the operand swap sound.
289 ASSERT(original_check->length() == tighter_check->length());
290 MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
291 original_check->ReplaceAllUsesWith(original_check->index());
// Operand 0 of HBoundsCheck is the index being checked.
292 original_check->SetOperandAt(0, tighter_check->index());
293 if (FLAG_trace_bce) {
294 OS::Print(
"Tightened check #%d with offset %d from #%d\n",
295 original_check->id(), new_offset, tighter_check->id());
303 static bool BoundsCheckKeyMatch(
void* key1,
void* key2) {
304 BoundsCheckKey* k1 =
static_cast<BoundsCheckKey*
>(key1);
305 BoundsCheckKey* k2 =
static_cast<BoundsCheckKey*
>(key2);
306 return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
// NOTE(review): fragment of BoundsCheckTable::Insert (its Zone* parameter
// line is missing from this extraction) and the signature of
// BoundsCheckTable::Delete (body missing).
// Insert maps key -> data, inserting the entry if absent (the "true" flag
// to Lookup requests insertion).
322 void BoundsCheckTable::Insert(BoundsCheckKey* key,
323 BoundsCheckBbData* data,
325 Lookup(key, key->Hash(),
true, ZoneAllocationPolicy(zone))->value = data;
329 void BoundsCheckTable::Delete(BoundsCheckKey* key) {
// NOTE(review): lossy fragment of EliminateRedundantBoundsChecks. From the
// visible logic this is an explicit-stack, pre-order walk of the dominator
// tree starting at entry; the stack setup, the per-block pre/post
// processing calls, and the pop path are missing from this extraction.
347 void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
348 HBasicBlock* entry) {
360 while (stack_depth > 0) {
361 int current = stack_depth - 1;
// More dominated children left to visit for the current frame?
365 if (state->
index_ < children->length()) {
367 HBasicBlock* child = children->
at(state->
index_++);
// Push a new frame for the child block.
368 int next = stack_depth++;
369 stack[next].
block_ = child;
// NOTE(review): lossy fragment of PreProcessBlock. Purpose (from visible
// logic): for each HBoundsCheck in block bb, build or consult per-key
// BoundsCheckBbData and either (a) create fresh data, (b) eliminate the
// check because its offset is already covered, (c) extend the data owned
// by this block, or (d) create widened data when hoisting is allowed.
// Returns the list of data records created for this block.
381 BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
383 BoundsCheckBbData* bb_data_list =
NULL;
385 for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
386 HInstruction* i = it.Current();
387 if (!i->IsBoundsCheck())
continue;
389 HBoundsCheck*
check = HBoundsCheck::cast(i);
// Key creation (BoundsCheckKey::Create call not visible) fails for
// non-int32 indices; such checks are skipped.
391 BoundsCheckKey* key =
393 if (key ==
NULL)
continue;
394 BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
395 BoundsCheckBbData* data = *data_p;
// Case (a): first check for this key — record fresh data for this block
// (the data==NULL test and most constructor arguments are missing here).
397 bb_data_list =
new(zone()) BoundsCheckBbData(key,
405 *data_p = bb_data_list;
406 if (FLAG_trace_bce) {
407 OS::Print(
"Fresh bounds check data for block #%d: [%d]\n",
408 bb->block_id(), offset);
410 }
// Case (b): the existing covered range already includes this offset —
// the check is redundant and is removed.
else if (data->OffsetIsCovered(offset)) {
411 bb->graph()->isolate()->counters()->
412 bounds_checks_eliminated()->Increment();
413 if (FLAG_trace_bce) {
414 OS::Print(
"Eliminating bounds check #%d, offset %d is covered\n",
415 check->id(), offset);
417 check->DeleteAndReplaceWith(check->ActualValue());
418 }
// Case (c): this block already owns the data — widen its covered range.
else if (data->BasicBlock() == bb) {
432 data->CoverCheck(check, offset);
433 }
// Case (d): data belongs to a dominator; if optimistic LICM applies (or
// this block dominates the loop successors), shadow it with a widened
// range for the duration of this subtree.
else if (
graph()->use_optimistic_licm() ||
434 bb->IsLoopSuccessorDominator()) {
435 int32_t new_lower_offset = offset < data->LowerOffset()
437 : data->LowerOffset();
438 int32_t new_upper_offset = offset > data->UpperOffset()
440 : data->UpperOffset();
441 bb_data_list =
new(zone()) BoundsCheckBbData(key,
449 if (FLAG_trace_bce) {
450 OS::Print(
"Updated bounds check data for block #%d: [%d - %d]\n",
451 bb->block_id(), new_lower_offset, new_upper_offset);
453 table_.Insert(key, bb_data_list, zone());
// NOTE(review): fragment of PostProcessBlock. Purpose (from visible
// logic): when leaving a block during the dominator-tree walk, undo its
// table entries — restore the dominator's data where one exists, otherwise
// remove the key entirely. An "} else {" line between the Insert and the
// Delete appears to be missing from this extraction — TODO confirm.
461 void HBoundsCheckEliminationPhase::PostProcessBlock(
462 HBasicBlock* block, BoundsCheckBbData* data) {
463 while (data !=
NULL) {
464 if (data->FatherInDominatorTree()) {
465 table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
467 table_.Delete(data->Key());
469 data = data->NextInBasicBlock();
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
BoundsCheckBbData(BoundsCheckKey *key, int32_t lower_offset, int32_t upper_offset, HBasicBlock *bb, HBoundsCheck *lower_check, HBoundsCheck *upper_check, BoundsCheckBbData *next_in_bb, BoundsCheckBbData *father_in_dt)
#define ASSERT(condition)
BoundsCheckBbData * NextInBasicBlock() const
void UpdateLowerOffsets(HBoundsCheck *check, int32_t offset)
BoundsCheckKey * Key() const
int32_t LowerOffset() const
BoundsCheckTable(Zone *zone)
void check(i::Vector< const uint8_t > string)
int32_t UpperOffset() const
Entry * Lookup(void *key, uint32_t hash, bool insert, ZoneAllocationPolicyallocator=ZoneAllocationPolicy())
HInstruction * next() const
BoundsCheckBbData * FatherInDominatorTree() const
virtual intptr_t Hashcode()
static void Print(const char *format,...)
bool OffsetIsCovered(int32_t offset) const
static BoundsCheckKey * Create(Zone *zone, HBoundsCheck *check, int32_t *offset)
HBasicBlock * BasicBlock() const
HValue * IndexBase() const
void CoverCheck(HBoundsCheck *new_check, int32_t new_offset)
void * Remove(void *key, uint32_t hash)
BoundsCheckBbData * bb_data_list_
static HValue * cast(HValue *value)
HBoundsCheck * UpperCheck() const
HBoundsCheck * LowerCheck() const
void UpdateUpperOffsets(HBoundsCheck *check, int32_t offset)