#if V8_TARGET_ARCH_IA32
#elif V8_TARGET_ARCH_X64
#elif V8_TARGET_ARCH_ARM64
#elif V8_TARGET_ARCH_ARM
#elif V8_TARGET_ARCH_MIPS
#error "Unknown architecture."


static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
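// UsePosition (constructed below) describes a single use of a virtual
// register at a given lifetime position. register_beneficial_ starts out
// true and, when the operand is an LUnallocated, appears to be refined from
// the operand's allocation policy.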
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {

  return hint_ != NULL && !hint_->IsUnallocated();

  return register_beneficial_;


void LiveRange::Verify() const {
  while (cur != NULL) {


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
    current_interval = current_interval->next();


      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new(zone) LOperand()),
      spill_start_index_(kMaxInt) { }


  assigned_register_ = reg;
  ConvertOperands(zone);

  ConvertOperands(zone);

  return !spill_operand_->IsIgnored();

  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_->IsIgnored());

    use_pos = use_pos->next();
  last_processed_use_ = use_pos;

  if (use_pos == NULL) return true;

  ASSERT(!op->IsUnallocated());
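// FirstSearchIntervalForPosition and AdvanceLastProcessedMarker (below) cache
// the interval reached by the previous query in current_interval_, so that
// repeated Covers()/FirstIntersection() calls walking forward over a range
// stay roughly linear instead of rescanning from first_interval_ each time.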
UseInterval* LiveRange::FirstSearchIntervalForPosition(
  if (current_interval_ == NULL) return first_interval_;
    current_interval_ = NULL;
    return first_interval_;
  return current_interval_;


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
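// The fragments below are from LiveRange::SplitAt: the range is cut at
// |position|, the intervals and use positions at or after the cut move into
// |result|, and |result| is linked in as the next child of the top-level
// (parent) range so the original order of the pieces is preserved.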
  UseInterval* current = FirstSearchIntervalForPosition(position);

  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    current = first_interval_;

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);

  result->last_interval_ = (last_interval_ == before)
  result->first_interval_ = after;
  last_interval_ = before;

  if (split_at_start) {
      use_before = use_after;
      use_after = use_after->next();
      use_before = use_after;
      use_after = use_after->next();

  if (use_before != NULL) {
    use_before->next_ = NULL;
  result->first_pos_ = use_after;

  last_processed_use_ = NULL;
  current_interval_ = NULL;

  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;


  if (pos == NULL) return false;
  if (other_pos == NULL) return true;


  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  first_interval_->set_start(start);


  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
  while (first_interval_ != NULL &&
    new_end = first_interval_->end();
    first_interval_ = first_interval_->next();
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
    last_interval_ = new_interval;


  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
  if (first_interval_ == NULL) {
    first_interval_ = interval;
    last_interval_ = interval;
      first_interval_->set_start(start);
      interval->set_next(first_interval_);
      first_interval_ = interval;
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
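// AddUsePosition (below) keeps the use-position list sorted by lifetime
// position; the hint attached to the earliest hinted use becomes
// current_hint_operand_, which the allocator later consults when choosing a
// preferred register for the range.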
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
    prev_hint = current->HasHint() ? current : prev_hint;
    current = current->next();

    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;


void LiveRange::ConvertOperands(Zone* zone) {
  while (use_pos != NULL) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
    use_pos = use_pos->next();


  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
       interval = interval->next()) {
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;

    if (cur_intersection.IsValid()) {
      return cur_intersection;
    AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
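// LAllocator holds the classic linear-scan worklists: unhandled ranges sorted
// by start position, plus the active and inactive sets maintained while
// sweeping the instruction stream. The numeric arguments below are initial
// zone-list capacities.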
LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      has_osr_entry_(false),
      allocation_ok_(true) { }


void LAllocator::InitializeLivenessAnalysis() {
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
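// AddInitialIntervals (below) gives every value that is live out of the block
// a use interval spanning the whole block; ProcessInstructions then walks the
// block backwards and shortens or extends these intervals per definition and
// use.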
void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
      block->first_instruction_index());
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());


int LAllocator::FixedDoubleLiveRangeID(int index) {


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();

    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
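// Define and Use (below) are the two liveness primitives used during the
// backward walk: Define is called at a value's definition and shortens its
// range to start there, while Use extends the range from the block start up
// to the use and records a use position (with an optional register hint).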
void LAllocator::Define(LifetimePosition position,
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
    range->ShortenTo(position);

  if (operand->IsUnallocated()) {
    range->AddUsePosition(position, unalloc_operand, hint, zone());


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  range->AddUseInterval(block_start, position, zone());


void LAllocator::AddConstraintsGapMove(int index,
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
          move->AddMove(cur.source(), to, chunk()->zone());
  move->AddMove(from, to, chunk()->zone());


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    LInstruction* instr = NULL;
    LInstruction* prev_instr = NULL;
    if (i < end) instr = InstructionAt(i + 1);
    if (i > start) prev_instr = InstructionAt(i - 1);
    MeetConstraintsBetween(prev_instr, instr, i);
    if (!AllocationOk()) return;
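// MeetConstraintsBetween (below) satisfies operand constraints around a gap:
// fixed-register, writable-register and same-as-input policies are resolved
// by allocating fixed operands and inserting parallel gap moves rather than
// by constraining the register assignment itself.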
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);

  if (first != NULL && first->Output() != NULL) {
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
      chunk_->AddGapMove(gap_index, first_output, output_copy);

      range->SetSpillStartIndex(gap_index);

      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
      move->AddMove(first_output, range->GetSpillOperand(),

  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      if (cur_input->HasFixedPolicy()) {
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        ASSERT(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,

        AddConstraintsGapMove(gap_index, input_copy, cur_input);

  if (second != NULL && second->Output() != NULL) {
    if (second_output->HasSameAsInputPolicy()) {
      int input_vreg = cur_input->virtual_register();
      LUnallocated* input_copy = cur_input->CopyUnconstrained(
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
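// ProcessInstructions (below) is the core of live-range construction: it
// visits the block's instructions in reverse order, handling gap moves,
// outputs (definitions), register clobbers (modelled as uses of the fixed
// ranges), inputs and temps at the appropriate lifetime positions.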
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =

  while (index >= block_start) {
    LifetimePosition curr_position =

    if (IsGapAt(index)) {
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          if (to->IsUnallocated()) {
            Define(curr_position, to, from);
          Define(curr_position, to, from);
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
      LInstruction* instr = InstructionAt(index);
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
          Define(curr_position, output, NULL);

        if (instr->ClobbersRegisters()) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),

        if (instr->ClobbersDoubleRegisters()) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
            use_pos = curr_position;
            use_pos = curr_position.InstructionEnd();

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              if (temp_unalloc->HasFixedPolicy()) {
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc =
        unalloc->set_virtual_register(op->id());
      HBasicBlock* cur_block = block->predecessors()->at(j);
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,

      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());


bool LAllocator::Allocate(LChunk* chunk) {
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
  assigned_double_registers_ =
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ResolveControlFlow();
void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
  LifetimePosition cur_start =
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      cur_cover = cur_range;
    if (cur_range->CanCover(pred_end)) {
      pred_cover = cur_range;
    cur_range = cur_range->next();

  if (cur_cover->IsSpilled()) return;
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
            LOperand* cur_operand = second_range->CreateAssignedOperand(
            move->AddMove(prev_operand, cur_operand,

      first_range = second_range;
      second_range = second_range->next();


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
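// BuildLiveRanges (below) runs liveness analysis and range construction over
// the blocks in reverse order. Values live at a loop header get their ranges
// extended across the whole loop, and anything still live at block 0 is a use
// before its definition, reported through the PrintF diagnostics.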
void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    AddInitialIntervals(block, live);
    ProcessInstructions(block, live);
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
          hint = move->move_operands()->at(j).source();

          block->first_instruction_index());
      Define(block_start, phi_operand, hint);

    live_in_sets_[block_id] = live;

    if (block->IsLoopHeader()) {
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
          block->first_instruction_index());
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone());

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);

    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        if (chunk_->info()->IsStub()) {
          CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
          PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
          ASSERT(chunk_->info()->IsOptimizing());
              chunk_->info()->function()->debug_name()->ToCString().get());
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
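// PopulatePointerMaps (below) records every tagged value that is live across
// a safe point: the spill slot is recorded once the range has been spilled,
// and the assigned register operand is recorded while the value is still in a
// register.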
void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    if (range->parent() != NULL) continue;
    if (!HasTaggedValue(range->id())) continue;
    if (range->IsEmpty()) continue;

    int start = range->Start().InstructionIndex();
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);

    if (start < last_range_start) {
      first_safe_point_index = 0;
    last_range_start = start;

    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;

    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      if (safe_point - 1 > end) break;

      LifetimePosition safe_point_pos =
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
      if (cur == NULL) continue;

      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  AllocateRegisters();


void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  AllocateRegisters();
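// AllocateRegisters (below) is the main linear-scan loop: ranges of the
// current kind are queued as unhandled, fixed ranges start out inactive, and
// for each unhandled range the active/inactive sets are updated at its start
// position before trying a free register and, failing that, a blocked one.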
void LAllocator::AllocateRegisters() {
  ASSERT(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
    allocation_finger_ = position;
    TraceAlloc("Processing interval %d start=%d\n",

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        ASSERT(UnhandledIsSorted());

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
const char* LAllocator::RegisterName(int allocation_index) {


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_start(arguments, msg);


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  ASSERT(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      ASSERT(UnhandledIsSorted());
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  ASSERT(UnhandledIsSorted());


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();


void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;


void LAllocator::FreeSpillSlot(LiveRange* range) {
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
    reusable_slots_.Add(range, zone());


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());


              Register::kMaxNumAllocatableRegisters);
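// TryAllocateFreeReg (below) computes, for every register, the position up to
// which it is free with respect to the active and inactive ranges. If the
// hinted register is free for the whole range it is taken directly; otherwise
// the register that stays free the longest is chosen, splitting the range at
// that point when it is not free long enough.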
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  for (int i = 0; i < num_registers_; i++) {

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
    int register_index = hint->index();
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->End().Value());

    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
      SetLiveRangeAssignedRegister(current, register_index);

  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {

  if (pos.Value() < current->End().Value()) {
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);

  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
  SetLiveRangeAssignedRegister(current, reg);
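// AllocateBlockedReg (below) handles the case where no register is free for
// the whole range: it picks the register whose next use (or blocking fixed
// range) is furthest away, and either spills the current range up to its
// first register use or evicts the intersecting ranges via
// SplitAndSpillIntersecting.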
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {

  for (int i = 0; i < num_registers_; i++) {

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
        use_pos[cur_reg] = next_use->pos();

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);

  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    SpillBetween(current, current->Start(), register_use->pos());

  if (block_pos[reg].Value() < current->End().Value()) {
    LiveRange* tail = SplitBetween(current,
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);

  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
  SetLiveRangeAssignedRegister(current, reg);

  SplitAndSpillIntersecting(current);
LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
        loop_header->first_instruction_index());
    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
    loop_header = loop_header->parent_loop_header();
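// SplitAndSpillIntersecting (below) evicts the ranges currently occupying the
// chosen register: active ranges are spilled (up to their next register use),
// while inactive ranges are only spilled where they actually intersect the
// current range.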
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      if (!AllocationOk()) return;
      ActiveToHandled(range);

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        if (!AllocationOk()) return;
        InactiveToHandled(range);


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         InstructionAt(pos.InstructionIndex())->IsLabel();


LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
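// FindOptimalSplitPos (below) tries to hoist the split point out of loops:
// starting from the block containing |end|, it walks up through parent loop
// headers whose loops begin after |start|, so the resulting spill and reload
// code lands outside the deepest possible loop.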
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {

  HBasicBlock* block = end_block;
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();

  if (block == end_block && !end_block->IsLoopHeader()) return end;

      block->first_instruction_index());


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);


void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    LiveRange* third_part = SplitBetween(
        Max(second_part->Start().InstructionEnd(), until),
    if (!AllocationOk()) return;

    ASSERT(third_part != second_part);

    AddToUnhandledSorted(third_part);
    AddToUnhandledSorted(second_part);
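// Spill (below) assigns a spill slot to the top-level range, reusing a slot
// from reusable_slots_ when its previous holder has already ended, and marks
// the range as spilled.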
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
    first->SetSpillOperand(op);
  range->MakeSpilled(chunk()->zone());


int LAllocator::RegisterCount() const {
  return num_registers_;


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();


    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();

  if (FLAG_hydrogen_stats) {
    unsigned size = allocator_->zone()->allocation_size() -
                    allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), TimeDelta(), size);

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);

  if (allocator_ != NULL) allocator_->Verify();