// HInstructionMap: a zone-allocated open-hashing map from instruction hash
// codes to HInstruction*, used by GVN to find equivalent instructions.
// Collisions chain through 'lists_' (indices, kNil-terminated) with a
// free list headed by 'free_list_head_'.
// NOTE(review): this chunk is a garbled extraction -- the embedded original
// line numbers jump (e.g. 46 -> 49 -> 52), so declarations and braces are
// missing here; do not assume this fragment compiles as-is.
35 class HInstructionMap
V8_FINAL :
public ZoneObject {
// Constructor tail (header dropped by extraction): seed both tables.
43 free_list_head_(kNil),
44 side_effects_tracker_(side_effects_tracker) {
45 ResizeLists(kInitialSize, zone);
46 Resize(kInitialSize, zone);
49 void Kill(SideEffects side_effects);
// Add() body fragment: track the union of depends-on sets of stored entries.
52 present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
// Copy() fragment: zone-allocates a copy via the private copy constructor.
59 return new(zone) HInstructionMap(zone,
this);
62 bool IsEmpty()
const {
return count_ == 0; }
66 struct HInstructionMapListElement {
// kNil marks "no next element" in the index-based chains.
70 static const int kNil = -1;
73 static const int kInitialSize = 16;
75 HInstructionMap(Zone* zone,
const HInstructionMap* other);
77 void Resize(
int new_size, Zone* zone);
78 void ResizeLists(
int new_size, Zone* zone);
79 void Insert(HInstruction* instr, Zone* zone);
// Bound() masks a hash into the table -- requires array_size_ be a power of two.
80 uint32_t Bound(uint32_t value)
const {
return value & (array_size_ - 1); }
85 SideEffects present_depends_on_;
86 HInstructionMapListElement* array_;
89 HInstructionMapListElement* lists_;
92 SideEffectsTracker* side_effects_tracker_;
// HSideEffectMap member fragments (class header dropped by extraction).
99 explicit HSideEffectMap(HSideEffectMap* other);
100 HSideEffectMap& operator= (
const HSideEffectMap& other);
102 void Kill(SideEffects side_effects);
104 void Store(SideEffects side_effects, HInstruction* instr);
// Tail of TraceGVN(const char* msg, ...) (head dropped by extraction),
// followed by the TRACE_GVN_N convenience macros that forward N varargs
// only when FLAG_trace_gvn is set.
// NOTE(review): extraction dropped the macro body/closing lines of
// TRACE_GVN_1; lines below ending in '\' are macro continuations, so no
// comments are placed between them.
123 va_start(arguments, msg);
131 #define TRACE_GVN_1(msg, a1) \
132 if (FLAG_trace_gvn) { \
136 #define TRACE_GVN_2(msg, a1, a2) \
137 if (FLAG_trace_gvn) { \
138 TraceGVN(msg, a1, a2); \
141 #define TRACE_GVN_3(msg, a1, a2, a3) \
142 if (FLAG_trace_gvn) { \
143 TraceGVN(msg, a1, a2, a3); \
146 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
147 if (FLAG_trace_gvn) { \
148 TraceGVN(msg, a1, a2, a3, a4); \
151 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
152 if (FLAG_trace_gvn) { \
153 TraceGVN(msg, a1, a2, a3, a4, a5); \
// Private copy constructor used by Copy(): clones the scalar state, allocates
// fresh backing arrays in 'zone', then bulk-copies both tables.
// NOTE(review): extraction dropped the copy-call heads at original lines
// 166/168 (presumably OS::MemCopy(...) -- confirm against upstream) and the
// closing brace; fragment is not complete code.
157 HInstructionMap::HInstructionMap(Zone* zone,
const HInstructionMap* other)
158 : array_size_(other->array_size_),
159 lists_size_(other->lists_size_),
160 count_(other->count_),
161 present_depends_on_(other->present_depends_on_),
162 array_(zone->
NewArray<HInstructionMapListElement>(other->array_size_)),
163 lists_(zone->
NewArray<HInstructionMapListElement>(other->lists_size_)),
164 free_list_head_(other->free_list_head_),
165 side_effects_tracker_(other->side_effects_tracker_) {
167 array_, other->array_, array_size_ *
sizeof(HInstructionMapListElement));
169 lists_, other->lists_, lists_size_ *
sizeof(HInstructionMapListElement));
// Kill: remove every cached instruction whose depends-on set intersects
// 'changes', and rebuild present_depends_on_ from the survivors. Fast-exits
// when nothing stored can be affected.
// NOTE(review): embedded numbering jumps (177 -> 182, 186 -> 189, ...) --
// lines were dropped by extraction, including the declarations of 'next'
// and 'kept' used below; fragment is not complete code.
173 void HInstructionMap::Kill(SideEffects changes) {
174 if (!present_depends_on_.ContainsAnyOf(changes))
return;
175 present_depends_on_.RemoveAll();
176 for (
int i = 0; i < array_size_; ++i) {
177 HInstruction* instr = array_[i].instr;
// Walk this bucket's overflow chain: killed nodes are pushed onto the
// free list; kept nodes are relinked via 'kept' and their depends-on
// folded back into present_depends_on_.
182 for (
int current = array_[i].next; current != kNil; current = next) {
183 next = lists_[current].next;
184 HInstruction* instr = lists_[current].instr;
185 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
186 if (depends_on.ContainsAnyOf(changes)) {
189 lists_[current].next = free_list_head_;
190 free_list_head_ = current;
193 lists_[current].next = kept;
195 present_depends_on_.Add(depends_on);
198 array_[i].next = kept;
// Now the entry stored directly in the array slot: if killed, promote the
// chain head into the slot (or NULL it out) and recycle the head node.
201 instr = array_[i].instr;
202 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
203 if (depends_on.ContainsAnyOf(changes)) {
205 int head = array_[i].next;
207 array_[i].instr =
NULL;
209 array_[i].instr = lists_[head].instr;
210 array_[i].next = lists_[head].next;
211 lists_[head].next = free_list_head_;
212 free_list_head_ = head;
215 present_depends_on_.Add(depends_on);
// Lookup: find a previously-stored instruction equal to 'instr' (by
// Hashcode()/Equals()), checking the array slot first and then the
// overflow chain.
// NOTE(review): extraction dropped the tail (original lines 231+,
// presumably the closing braces and a 'return NULL;' -- confirm upstream).
222 HInstruction* HInstructionMap::Lookup(HInstruction* instr)
const {
223 uint32_t hash =
static_cast<uint32_t
>(instr->Hashcode());
224 uint32_t pos = Bound(hash);
225 if (array_[pos].instr !=
NULL) {
226 if (array_[pos].instr->Equals(instr))
return array_[pos].instr;
// Chase the index-linked collision chain until kNil.
227 int next = array_[pos].next;
228 while (next != kNil) {
229 if (lists_[next].instr->Equals(instr))
return lists_[next].instr;
230 next = lists_[next].next;
// Resize: grow the hash array to 'new_size' and re-insert every existing
// entry (array slots and their chains), recycling chain nodes onto the
// free list as they are consumed.
// NOTE(review): embedded numbering jumps (238 -> 243, 269 -> 273, ...) --
// assignments/braces were dropped by extraction; fragment is incomplete.
237 void HInstructionMap::Resize(
int new_size, Zone* zone) {
238 ASSERT(new_size > count_);
// Guarantee a free chain node exists before rehashing inserts need one.
243 if (free_list_head_ == kNil) {
244 ResizeLists(lists_size_ << 1, zone);
247 HInstructionMapListElement* new_array =
248 zone->NewArray<HInstructionMapListElement>(new_size);
249 memset(new_array, 0,
sizeof(HInstructionMapListElement) * new_size);
251 HInstructionMapListElement* old_array = array_;
252 int old_size = array_size_;
// old_count lets the trailing ASSERT check re-insertion lost nothing.
254 int old_count = count_;
257 array_size_ = new_size;
260 if (old_array !=
NULL) {
262 for (
int i = 0; i < old_size; ++i) {
263 if (old_array[i].instr !=
NULL) {
// Re-insert chained entries first, returning their nodes to the free list.
264 int current = old_array[i].next;
265 while (current != kNil) {
266 Insert(lists_[current].instr, zone);
267 int next = lists_[current].next;
268 lists_[current].next = free_list_head_;
269 free_list_head_ = current;
273 Insert(old_array[i].instr, zone);
278 ASSERT(count_ == old_count);
// ResizeLists: grow the overflow-chain node pool to 'new_size', copy the old
// nodes over, and thread the newly-added tail nodes onto the free list.
// NOTE(review): extraction dropped lines (288, 291, 296, 298, 301+),
// including the lists_ reassignment, the copy-call head before line 297,
// and the free_list_head_ update in the loop; fragment is incomplete.
282 void HInstructionMap::ResizeLists(
int new_size, Zone* zone) {
283 ASSERT(new_size > lists_size_);
285 HInstructionMapListElement* new_lists =
286 zone->NewArray<HInstructionMapListElement>(new_size);
287 memset(new_lists, 0,
sizeof(HInstructionMapListElement) * new_size);
289 HInstructionMapListElement* old_lists = lists_;
290 int old_size = lists_size_;
292 lists_size_ = new_size;
295 if (old_lists !=
NULL) {
297 lists_, old_lists, old_size *
sizeof(HInstructionMapListElement));
// Link each new node into the free list.
299 for (
int i = old_size; i < lists_size_; ++i) {
300 lists_[i].next = free_list_head_;
// Insert: add 'instr' to the map. Keeps load factor <= 1/2 by doubling the
// array first; an empty slot takes the instruction directly, otherwise a
// node is taken from the free list and pushed at the chain head.
// NOTE(review): extraction dropped lines (307-308, 311, 316, 319, 327+),
// e.g. the count_ bookkeeping and closing braces; fragment is incomplete.
306 void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
309 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
310 ASSERT(count_ < array_size_);
312 uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
313 if (array_[pos].instr ==
NULL) {
314 array_[pos].instr = instr;
315 array_[pos].next = kNil;
// Collision path: ensure a free node exists, then pop it and link it in
// front of the existing chain.
317 if (free_list_head_ == kNil) {
318 ResizeLists(lists_size_ << 1, zone);
320 int new_element_pos = free_list_head_;
321 ASSERT(new_element_pos != kNil);
322 free_list_head_ = lists_[free_list_head_].next;
323 lists_[new_element_pos].instr = instr;
324 lists_[new_element_pos].next = array_[pos].next;
325 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].instr !=
NULL);
326 array_[pos].next = new_element_pos;
// HSideEffectMap member definitions: maps each GVN flag index to the last
// instruction that produced that side effect, with count_ tracking how many
// slots are occupied.
// NOTE(review): all bodies below are truncated by the extraction (numbering
// jumps 331 -> 336 -> 341, loop headers over 'i' are missing, etc.);
// fragments are incomplete.
331 HSideEffectMap::HSideEffectMap() : count_(0) {
336 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
// operator=: guarded self-assignment copy (body dropped by extraction).
341 HSideEffectMap& HSideEffectMap::operator= (
const HSideEffectMap& other) {
342 if (
this != &other) {
// Kill: clear the dominator entry for each flag present in 'side_effects'.
349 void HSideEffectMap::Kill(SideEffects side_effects) {
351 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
352 if (data_[i] !=
NULL) count_--;
// Store: record 'instr' as the dominating producer for each flagged effect.
359 void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
361 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
362 if (data_[i] ==
NULL) count_++;
// ComputeChanges: start from the instruction's coarse ChangesFlags() and
// refine kGlobalVars / kInobjectFields into per-cell / per-field "special"
// effects when the store target can be mapped to a tracked index; otherwise
// conservatively add all specials of that kind.
// NOTE(review): extraction dropped lines ('int index;' declaration, the
// 'else' glue at 377/388, and the closing lines 391+); fragment incomplete.
369 SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
371 SideEffects result(instr->ChangesFlags());
372 if (result.ContainsFlag(kGlobalVars)) {
373 if (instr->IsStoreGlobalCell() &&
374 ComputeGlobalVar(HStoreGlobalCell::cast(instr)->cell(), &index)) {
375 result.RemoveFlag(kGlobalVars);
376 result.AddSpecial(GlobalVar(index));
// Unknown cell: assume it may change every tracked global var.
378 for (index = 0; index < kNumberOfGlobalVars; ++index) {
379 result.AddSpecial(GlobalVar(index));
383 if (result.ContainsFlag(kInobjectFields)) {
384 if (instr->IsStoreNamedField() &&
385 ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
386 result.RemoveFlag(kInobjectFields);
387 result.AddSpecial(InobjectField(index));
// Unknown field access: assume it may change every tracked field.
389 for (index = 0; index < kNumberOfInobjectFields; ++index) {
390 result.AddSpecial(InobjectField(index));
// ComputeDependsOn: mirror of ComputeChanges for the read side -- refines
// DependsOnFlags() using LoadGlobalCell / LoadNamedField targets, falling
// back to "depends on all specials of this kind" when the target is not a
// trackable load.
// NOTE(review): same extraction gaps as ComputeChanges ('int index;',
// else-glue, closing lines missing); fragment incomplete.
398 SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
400 SideEffects result(instr->DependsOnFlags());
401 if (result.ContainsFlag(kGlobalVars)) {
402 if (instr->IsLoadGlobalCell() &&
403 ComputeGlobalVar(HLoadGlobalCell::cast(instr)->cell(), &index)) {
404 result.RemoveFlag(kGlobalVars);
405 result.AddSpecial(GlobalVar(index));
407 for (index = 0; index < kNumberOfGlobalVars; ++index) {
408 result.AddSpecial(GlobalVar(index));
412 if (result.ContainsFlag(kInobjectFields)) {
413 if (instr->IsLoadNamedField() &&
414 ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
415 result.RemoveFlag(kInobjectFields);
416 result.AddSpecial(InobjectField(index));
418 for (index = 0; index < kNumberOfInobjectFields; ++index) {
419 result.AddSpecial(InobjectField(index));
// PrintSideEffectsTo: render a SideEffects set for --trace-gvn output --
// named flags first (via the DECLARE_FLAG X-macro over the GVN flag lists),
// then tracked global vars as "[%p]" and tracked in-object fields.
// NOTE(review): heavily truncated by extraction (flag loop header, separator
// updates, macro bodies, closing braces all missing); the '\'-continued
// macro lines below must not have comments interleaved.
427 void SideEffectsTracker::PrintSideEffectsTo(StringStream* stream,
428 SideEffects side_effects)
const {
429 const char* separator =
"";
433 if (side_effects.ContainsFlag(flag)) {
434 stream->Add(separator);
437 #define DECLARE_FLAG(Type) \
439 stream->Add(#Type); \
449 for (
int index = 0; index < num_global_vars_; ++index) {
450 if (side_effects.ContainsSpecial(GlobalVar(index))) {
451 stream->Add(separator);
453 stream->Add(
"[%p]", *global_vars_[index].
handle());
456 for (
int index = 0; index < num_inobject_fields_; ++index) {
457 if (side_effects.ContainsSpecial(InobjectField(index))) {
458 stream->Add(separator);
460 inobject_fields_[index].PrintTo(stream);
// ComputeGlobalVar: map a global 'cell' to a small tracked index. Returns
// the existing index if already tracked, otherwise registers the cell while
// capacity (kNumberOfGlobalVars) remains; the dropped tail presumably
// returns false when full -- confirm against upstream.
// NOTE(review): extraction dropped lines 470-473, 481, 484+ (the *index
// assignment in the found case, returns, closing braces).
467 bool SideEffectsTracker::ComputeGlobalVar(Unique<Cell> cell,
int* index) {
468 for (
int i = 0; i < num_global_vars_; ++i) {
469 if (cell == global_vars_[i]) {
474 if (num_global_vars_ < kNumberOfGlobalVars) {
// Optional tracing of newly-tracked cells under --trace-gvn.
475 if (FLAG_trace_gvn) {
476 HeapStringAllocator allocator;
477 StringStream stream(&allocator);
478 stream.Add(
"Tracking global var [%p] (mapped to index %d)\n",
479 *cell.handle(), num_global_vars_);
480 stream.OutputToStdOut();
482 *index = num_global_vars_;
483 global_vars_[num_global_vars_++] = cell;
// ComputeInobjectField: analogue of ComputeGlobalVar for HObjectAccess --
// returns the tracked index for 'access', registering it while capacity
// (kNumberOfInobjectFields) remains.
// NOTE(review): extraction dropped the 'int* index' parameter line (491),
// the found-case body (494-497), returns, and closing braces.
490 bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
492 for (
int i = 0; i < num_inobject_fields_; ++i) {
493 if (access.Equals(inobject_fields_[i])) {
498 if (num_inobject_fields_ < kNumberOfInobjectFields) {
// Optional tracing of newly-tracked accesses under --trace-gvn.
499 if (FLAG_trace_gvn) {
500 HeapStringAllocator allocator;
501 StringStream stream(&allocator);
502 stream.Add(
"Tracking inobject field access ");
503 access.PrintTo(&stream);
504 stream.Add(
" (mapped to index %d)\n", num_inobject_fields_);
505 stream.OutputToStdOut();
507 *index = num_inobject_fields_;
508 inobject_fields_[num_inobject_fields_++] = access;
// Phase constructor: sizes the per-block and per-loop side-effect tables and
// the visited bitmap to the graph's block count, and pre-fills both tables
// with empty SideEffects. Handle allocation must be disallowed during GVN
// (asserted below).
// NOTE(review): closing brace dropped by extraction (numbering ends at 525).
515 HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
516 : HPhase(
"H_Global value numbering", graph),
517 removed_side_effects_(
false),
518 block_side_effects_(graph->blocks()->length(), zone()),
519 loop_side_effects_(graph->blocks()->length(), zone()),
520 visited_on_paths_(graph->blocks()->length(), zone()) {
521 ASSERT(!AllowHandleAllocation::IsAllowed());
522 block_side_effects_.AddBlock(
523 SideEffects(), graph->blocks()->length(), zone());
524 loop_side_effects_.AddBlock(
525 SideEffects(), graph->blocks()->length(), zone());
// Run: fixed-point driver. Each iteration recomputes block side effects,
// optionally performs loop-invariant code motion, then (on dropped lines,
// presumably AnalyzeGraph -- confirm upstream) numbers the graph; iterate
// again only if side effects were removed, up to FLAG_gvn_iterations times,
// resetting the per-block/per-loop tables between passes.
// NOTE(review): extraction dropped lines 532, 534-535, 537-541, 544-545,
// 551, 553+; fragment is incomplete.
529 void HGlobalValueNumberingPhase::Run() {
530 ASSERT(!removed_side_effects_);
531 for (
int i = FLAG_gvn_iterations; i > 0; --i) {
533 ComputeBlockSideEffects();
536 if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();
// Stop iterating once a pass removes nothing.
542 if (!removed_side_effects_)
break;
543 removed_side_effects_ =
false;
// Clear accumulated state before the next fixed-point iteration.
546 ASSERT_EQ(block_side_effects_.length(), graph()->blocks()->length());
547 ASSERT_EQ(loop_side_effects_.length(), graph()->blocks()->length());
548 for (
int i = 0; i < graph()->blocks()->length(); ++i) {
549 block_side_effects_[i].RemoveAll();
550 loop_side_effects_[i].RemoveAll();
552 visited_on_paths_.Clear();
// ComputeBlockSideEffects: walk blocks in reverse order, union each
// reachable, non-deoptimizing block's instruction changes into
// block_side_effects_[id], and propagate them into loop_side_effects_ for
// the block's loop header and every enclosing parent loop header.
// NOTE(review): extraction dropped lines (559, 567, 569-570, 573-575, 579,
// 584+), including the do { of the do/while visible at line 583.
557 void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
558 for (
int i = graph()->blocks()->length() - 1; i >= 0; --i) {
560 HBasicBlock* block = graph()->blocks()->at(i);
561 SideEffects side_effects;
562 if (block->IsReachable() && !block->IsDeoptimizing()) {
563 int id = block->block_id();
564 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
565 HInstruction* instr = it.Current();
566 side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
568 block_side_effects_[id].Add(side_effects);
571 if (block->IsLoopHeader()) {
572 loop_side_effects_[id].Add(side_effects);
// Bubble the effects up through every enclosing loop.
576 if (block->HasParentLoopHeader()) {
577 HBasicBlock* with_parent = block;
578 if (block->IsLoopHeader()) side_effects = loop_side_effects_[
id];
580 HBasicBlock* parent_block = with_parent->parent_loop_header();
581 loop_side_effects_[parent_block->block_id()].Add(side_effects);
582 with_parent = parent_block;
583 }
while (with_parent->HasParentLoopHeader());
// LoopInvariantCodeMotion: for each loop header (scanned back-to-front),
// take the loop's accumulated kill set and run ProcessLoopBlock over every
// block from the header through the last back edge.
// NOTE(review): extraction dropped lines (601, 603, 605, 609+), e.g. the
// block-id argument to the trace Add() at line 600 and closing braces.
590 void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
591 TRACE_GVN_1(
"Using optimistic loop invariant code motion: %s\n",
592 graph()->use_optimistic_licm() ?
"yes" :
"no");
593 for (
int i = graph()->blocks()->length() - 1; i >= 0; --i) {
594 HBasicBlock* block = graph()->blocks()->at(i);
595 if (block->IsLoopHeader()) {
596 SideEffects side_effects = loop_side_effects_[block->block_id()];
// Trace the loop's kill set under --trace-gvn.
597 if (FLAG_trace_gvn) {
598 HeapStringAllocator allocator;
599 StringStream stream(&allocator);
600 stream.Add(
"Try loop invariant motion for block B%d changes ",
602 side_effects_tracker_.PrintSideEffectsTo(&stream, side_effects);
604 stream.OutputToStdOut();
606 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
607 for (
int j = block->block_id(); j <= last->block_id(); ++j) {
608 ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
// ProcessLoopBlock: scan one loop block's instructions; a GVN-able
// instruction is hoisted before the loop pre-header's end when (a) its
// depends-on set does not intersect the loop's kill set, (b) motion is
// allowed (optimistic LICM or dominator check), and (c) all operands are
// defined before the pre-header and ShouldMove() agrees.
// NOTE(review): extraction dropped many lines (616 'HBasicBlock* block'
// parameter, 624, 626, 628, 645, 647, 651-653, 658-660, 664-665, 668+),
// so several argument lists and braces below are visibly truncated.
615 void HGlobalValueNumberingPhase::ProcessLoopBlock(
617 HBasicBlock* loop_header,
618 SideEffects loop_kills) {
619 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
620 if (FLAG_trace_gvn) {
621 HeapStringAllocator allocator;
622 StringStream stream(&allocator);
623 stream.Add(
"Loop invariant code motion for B%d depends on ",
625 side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
627 stream.OutputToStdOut();
// Iterate via a saved 'next' pointer since hoisting relinks 'instr'.
629 HInstruction* instr = block->first();
630 while (instr !=
NULL) {
631 HInstruction* next = instr->next();
632 if (instr->CheckFlag(HValue::kUseGVN)) {
633 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
634 SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
635 if (FLAG_trace_gvn) {
636 HeapStringAllocator allocator;
637 StringStream stream(&allocator);
638 stream.Add(
"Checking instruction i%d (%s) changes ",
639 instr->id(), instr->Mnemonic());
640 side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
641 stream.Add(
", depends on ");
642 side_effects_tracker_.PrintSideEffectsTo(&stream, depends_on);
643 stream.Add(
". Loop changes ");
644 side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
646 stream.OutputToStdOut();
648 bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
// Without optimistic LICM, only hoist from blocks dominating all loop exits.
649 if (can_hoist && !graph()->use_optimistic_licm()) {
650 can_hoist = block->IsLoopSuccessorDominator();
654 bool inputs_loop_invariant =
true;
655 for (
int i = 0; i < instr->OperandCount(); ++i) {
656 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
657 inputs_loop_invariant =
false;
661 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
662 TRACE_GVN_2(
"Hoisting loop invariant instruction i%d to block B%d\n",
663 instr->id(), pre_header->block_id());
666 instr->InsertBefore(pre_header->end());
667 if (instr->HasSideEffects()) removed_side_effects_ =
true;
// AllowCodeMotion: permit motion for stubs, or while the function still has
// optimization attempts left (opt_count + 1 < FLAG_max_opt_count).
// NOTE(review): closing braces dropped by extraction (678, 687) and
// ShouldMove's body lines 683-684 are missing.
676 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
677 return info()->IsStub() ||
info()->opt_count() + 1 < FLAG_max_opt_count;
// ShouldMove: additionally require the instruction's block be reachable and
// not deoptimizing.
681 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
682 HBasicBlock* loop_header) {
685 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
686 instr->block()->IsReachable();
// CollectSideEffectsOnPathsToDominatedBlock: recursively union the side
// effects of every block on a path strictly between 'dominator' and
// 'dominated' (by block-id range), using visited_on_paths_ to avoid
// revisiting blocks; loop headers also contribute their loop's effects.
// NOTE(review): the return type (line 690), the recursive call's arguments
// (705+), and closing lines were dropped by the extraction.
691 HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
692 HBasicBlock* dominator, HBasicBlock* dominated) {
693 SideEffects side_effects;
694 for (
int i = 0; i < dominated->predecessors()->length(); ++i) {
695 HBasicBlock* block = dominated->predecessors()->at(i);
696 if (dominator->block_id() < block->block_id() &&
697 block->block_id() < dominated->block_id() &&
698 !visited_on_paths_.Contains(block->block_id())) {
699 visited_on_paths_.Add(block->block_id());
700 side_effects.Add(block_side_effects_[block->block_id()]);
701 if (block->IsLoopHeader()) {
702 side_effects.Add(loop_side_effects_[block->block_id()]);
704 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
// GvnBasicBlockState: one frame of the explicit dominator-tree traversal
// stack used by AnalyzeGraph(). Each frame holds a block, its instruction
// map, the side-effect dominators snapshot, and iteration state over the
// block's dominated children; frames link via previous_/next_.
// NOTE(review): this span is fragments of the class -- the class header,
// CreateEntry's head, next_in_dominator_tree_traversal's head, and many
// bodies were dropped by the extraction (embedded numbering jumps
// throughout); none of it is complete code.
721 HBasicBlock* entry_block,
722 HInstructionMap* entry_map) {
727 HBasicBlock*
block() {
return block_; }
728 HInstructionMap*
map() {
return map_; }
// next_in_dominator_tree_traversal fragment: records the dominator of the
// returned state, backtracking through finished frames when needed.
733 HBasicBlock** dominator) {
736 *dominator = block();
738 if (result ==
NULL) {
740 if (dominator_state !=
NULL) {
743 *dominator = dominator_state->
block();
744 result = dominator_state->next_dominated(zone);
// Initialize: bind this frame to 'block'; copy_map decides whether the
// instruction map is cloned or shared with the parent frame.
754 void Initialize(HBasicBlock* block,
755 HInstructionMap*
map,
756 HSideEffectMap* dominators,
760 map_ = copy_map ? map->Copy(zone) :
map;
761 dominated_index_ = -1;
762 length_ = block->dominated_blocks()->length();
763 if (dominators !=
NULL) {
764 dominators_ = *dominators;
767 bool is_done() {
return dominated_index_ >= length_; }
769 GvnBasicBlockState(GvnBasicBlockState* previous,
771 HInstructionMap* map,
772 HSideEffectMap* dominators,
774 : previous_(previous), next_(
NULL) {
775 Initialize(block, map, dominators,
true, zone);
// next_dominated: advance to the next dominated child; the last child may
// reuse this frame (tail-call style), earlier children get a pushed frame.
778 GvnBasicBlockState* next_dominated(Zone* zone) {
780 if (dominated_index_ == length_ - 1) {
782 Initialize(block_->dominated_blocks()->at(dominated_index_),
788 }
else if (dominated_index_ < length_) {
789 return push(zone, block_->dominated_blocks()->at(dominated_index_));
// push: create or recycle the next_ frame for 'block' (map is copied).
795 GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
798 new(zone) GvnBasicBlockState(
this, block,
map(), dominators(), zone);
800 next_->Initialize(block,
map(), dominators(),
true, zone);
// pop: walk back to the nearest unfinished ancestor frame, tracing each step.
804 GvnBasicBlockState* pop() {
805 GvnBasicBlockState* result = previous_;
806 while (result !=
NULL && result->is_done()) {
807 TRACE_GVN_2(
"Backtracking from block B%d to block b%d\n",
809 previous_->block()->block_id())
810 result = result->previous_;
815 GvnBasicBlockState* previous_;
816 GvnBasicBlockState* next_;
818 HInstructionMap* map_;
819 HSideEffectMap dominators_;
820 int dominated_index_;
// AnalyzeGraph: the main GVN walk. Traverses the dominator tree with an
// explicit GvnBasicBlockState stack; per block it kills loop side effects
// at loop headers, lets side-effect dominators simplify instructions,
// replaces GVN-able instructions with previously-seen equivalents from the
// map, and records new entries. On moving to a dominated block that is not
// an immediate successor, it kills map entries clobbered on any intervening
// path (CollectSideEffectsOnPathsToDominatedBlock).
// NOTE(review): extraction dropped many lines (835, 840-845, 849-851, 855,
// 860-864, 867-871, 873, 876-878, 885, 887-888, 892, 895-898, 901, 903-906,
// 910-912, 916-921, 927, 930+) -- trace-call argument lists and braces
// below are visibly truncated; not complete code.
829 void HGlobalValueNumberingPhase::AnalyzeGraph() {
830 HBasicBlock* entry_block = graph()->entry_block();
831 HInstructionMap* entry_map =
832 new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
833 GvnBasicBlockState* current =
834 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
836 while (current !=
NULL) {
837 HBasicBlock* block = current->block();
838 HInstructionMap* map = current->map();
839 HSideEffectMap* dominators = current->dominators();
843 block->IsLoopHeader() ?
" (loop header)" :
"");
// Entering a loop header invalidates everything the loop body may change.
846 if (block->IsLoopHeader()) {
847 map->Kill(loop_side_effects_[block->block_id()]);
848 dominators->Kill(loop_side_effects_[block->block_id()]);
852 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
853 HInstruction* instr = it.Current();
// Side-effect dominator handling: give the instruction a chance to
// simplify against the last dominating producer of each flag it reads.
854 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
856 HValue* other = dominators->at(i);
857 GVNFlag flag = GVNFlagFromInt(i);
858 if (instr->DependsOnFlags().Contains(flag) && other !=
NULL) {
859 TRACE_GVN_5(
"Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
865 if (instr->HandleSideEffectDominator(flag, other)) {
866 removed_side_effects_ =
true;
// The dominator handling above may have unlinked the instruction.
872 if (!instr->IsLinked())
continue;
874 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
875 if (!changes.IsEmpty()) {
879 dominators->Store(changes, instr);
880 if (FLAG_trace_gvn) {
881 HeapStringAllocator allocator;
882 StringStream stream(&allocator);
883 stream.Add(
"Instruction i%d changes ", instr->id());
884 side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
886 stream.OutputToStdOut();
// Value numbering proper: replace with an equal cached instruction, or
// cache this one.
889 if (instr->CheckFlag(HValue::kUseGVN)) {
890 ASSERT(!instr->HasObservableSideEffects());
891 HInstruction* other = map->Lookup(instr);
893 ASSERT(instr->Equals(other) && other->Equals(instr));
894 TRACE_GVN_4(
"Replacing instruction i%d (%s) with i%d (%s)\n",
899 if (instr->HasSideEffects()) removed_side_effects_ =
true;
900 instr->DeleteAndReplaceWith(other);
902 map->Add(instr, zone());
// Advance the dominator-tree traversal; kill map/dominator entries
// clobbered on paths between the dominator and the next block.
907 HBasicBlock* dominator_block;
908 GvnBasicBlockState* next =
909 current->next_in_dominator_tree_traversal(zone(),
913 HBasicBlock* dominated = next->block();
914 HInstructionMap* successor_map = next->map();
915 HSideEffectMap* successor_dominators = next->dominators();
922 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
923 dominator_block->block_id() + 1 < dominated->block_id()) {
924 visited_on_paths_.Clear();
925 SideEffects side_effects_on_all_paths =
926 CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
928 successor_map->Kill(side_effects_on_all_paths);
929 successor_dominators->Kill(side_effects_on_all_paths);
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
HInstructionMap(Zone *zone, SideEffectsTracker *side_effects_tracker)
#define TRACE_GVN_2(msg, a1, a2)
#define ASSERT(condition)
kInstanceClassNameOffset flag
HInstruction * operator[](int i) const
T * NewArray(size_t size)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
HInstruction * at(int i) const
#define TRACE_GVN_4(msg, a1, a2, a3, a4)
HInstructionMap * Copy(Zone *zone) const
#define GVN_TRACKED_FLAG_LIST(V)
#define TRACE_GVN_1(msg, a1)
GvnBasicBlockState * next_in_dominator_tree_traversal(Zone *zone, HBasicBlock **dominator)
static void VPrint(const char *format, va_list args)
static GvnBasicBlockState * CreateEntry(Zone *zone, HBasicBlock *entry_block, HInstructionMap *entry_map)
void Add(HInstruction *instr, Zone *zone)
#define GVN_UNTRACKED_FLAG_LIST(V)
#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)
#define DECLARE_FLAG(Type)
Handle< T > handle(T *t, Isolate *isolate)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
#define ASSERT_EQ(v1, v2)
HSideEffectMap * dominators()
void TraceGVN(const char *msg,...)