#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error "Unknown architecture."
#endif
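
// Helpers that order LifetimePositions by their raw encoded value.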
static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}
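
// A UsePosition records one use of a virtual register. The operand's policy
// determines whether the use requires a register and whether a register
// would merely be beneficial.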
UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
    : operand_(operand),
      hint_(NULL),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}
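
// Debug helper: checks that every use position lies inside the live range.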
void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}
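
// Two use intervals overlap iff the start of one lies inside the other.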
bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}
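
// A LiveRange owns a list of disjoint use intervals plus a sorted list of
// use positions; kInvalidAssignment marks a range with no register yet.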
LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      is_double_(false),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      spill_operand_(new(zone) LOperand()),
      spill_start_index_(kMaxInt) { }
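
// Assigning a register or marking the range spilled rewrites all of the
// range's recorded operands via ConvertOperands.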
void LiveRange::set_assigned_register(int reg,
                                      RegisterKind register_kind,
                                      Zone* zone) {
  ASSERT(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  is_double_ = (register_kind == DOUBLE_REGISTERS);
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  ASSERT(!IsSpilled());
  ASSERT(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  ASSERT(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_ != NULL);
  ASSERT(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}
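
// The queries below walk the sorted use-position list; last_processed_use_
// caches the previous result so monotone queries stay linear overall.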
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // A range can be spilled only if it has no use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return use_pos->pos().Value() >
      pos.NextInstruction().InstructionEnd().Value();
}


LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    if (IsDouble()) {
      op = LDoubleRegister::Create(assigned_register(), zone);
    } else {
      op = LRegister::Create(assigned_register(), zone);
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start = current_interval_ == NULL
      ? LifetimePosition::Invalid()
      : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}
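
// Splits this range at |position|, moving the tail intervals and use
// positions into |result| and linking |result| into the chain of children.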
void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the position
  // is contained in one of the intervals in the chain, split that interval
  // and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first one after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval, so
    // uses at this position belong to the split child.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state; it might point at a use that no longer
  // belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range into the chain of children.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->next_ = next_;
  next_ = result;
}
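
// Ranges are allocated in order of their start positions; ties are broken
// by the position of the first use.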
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}
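
// EnsureInterval merges all existing intervals that start no later than
// |end| into a single interval covering at least [start, end[.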
void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}
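
// Blocks are processed in reverse order, so each new use interval either
// precedes or intersects the current first interval of the range.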
void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}


UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
                                       LOperand* operand,
                                       Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand);
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  return use_pos;
}
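
// Once a register or spill slot has been chosen, rewrite every operand
// recorded on this range to refer to the assigned location.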
void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
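
// The allocator runs twice over the chunk, once for general-purpose and
// once for double registers; mode_ selects the register kind in use.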
LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->zone()),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone_),
      live_ranges_(num_values * 2, zone_),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone_),
      active_live_ranges_(8, zone_),
      inactive_live_ranges_(8, zone_),
      reusable_slots_(8, zone_),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(GENERAL_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) { }

void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}
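
// live_out(B) is the union of live_in over B's forward successors, plus the
// phi inputs that flow out of B along its outgoing edges.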
BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, except not including backward
  // successor edges.
  BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_);

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone_);
    iterator.Advance();
  }
}


int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumAllocatableRegisters;
}
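
// Operands with a fixed policy (e.g. values pinned to calling-convention
// registers) are converted in place; tagged fixed values are additionally
// recorded in the instruction's pointer map.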
LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->policy() == LUnallocated::FIXED_SLOT) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
  } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone_) LiveRange(FixedLiveRangeID(index), zone_);
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, GENERAL_REGISTERS, zone_);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone_) LiveRange(FixedDoubleLiveRangeID(index), zone_);
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, DOUBLE_REGISTERS, zone_);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone_) LiveRange(index, zone_);
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}
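
// Define() starts (or shortens) a range at a definition; Use() extends a
// range backwards from a use to the block start. Since blocks are walked in
// reverse, uses are always seen before their definitions.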
void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone_);
    range->AddUsePosition(position.NextInstruction(), NULL, zone_);
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
  }
}


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
  }
  range->AddUseInterval(block_start, position, zone_);
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}
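
// Rewrites fixed-policy operands around one gap: fixed temps and outputs of
// the previous instruction, fixed inputs of the next one, and the
// "output same as input" constraint of two-address instructions.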
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it, so it must be
      // inserted at a position corresponding to the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
      move->AddMove(first_output, range->GetSpillOperand(), zone());
    }
  }

  // Handle fixed input operands of the second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
        // The live range of writable input registers always goes until the
        // end of the instruction.
        ASSERT(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
        cur_input->set_virtual_register(GetVirtualRegister());
        if (!AllocationOk()) return;

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" of the second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used, so the pointer map at the
        // instruction will include the output operand.
      }
    }
  }
}
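
// Walks a block's instructions in reverse, creating use intervals and use
// positions; instructions marked as calls clobber all allocatable registers
// via the fixed live ranges.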
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->FirstHint();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->IsMarkedAsCall()) {
          for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone_);
            }
          }
        }

        if (instr->IsMarkedAsCall()) {
          for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd(),
                                    zone_);
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->IsMarkedAsCall()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp,
              NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}
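
// For each phi, add a gap move from every predecessor's corresponding
// operand into the phi operand, and spill the phi at its block's label.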
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand = new(zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc = new(zone_) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction. Some
      // branches (e.g. loop back edges) have pointer maps, and the copy we
      // create here is invisible to PopulatePointerMaps, so record the
      // pointer manually.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged()) {
          branch->pointer_map()->RecordPointer(phi_operand, zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}
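
// Top-level driver: meet constraints, resolve phis, build liveness, run
// linear scan once per register kind, then fix up pointer maps, OSR entry,
// range connections, and control flow.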
bool LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  if (has_osr_entry_) ProcessOsrEntry();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  HPhase phase("L_Register constraints", chunk_);
  first_artificial_register_ = next_virtual_register_;
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  HPhase phase("L_Resolve phis", chunk_);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}
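
// If a value lives in different locations at the end of a predecessor and
// at the start of a block, insert a connecting move on that edge.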
void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      ASSERT(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      ASSERT(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  ASSERT(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(zone_);
    LOperand* cur_op = cur_cover->CreateAssignedOperand(zone_);
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        ASSERT(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction. The
        // copy created here is invisible to PopulatePointerMaps, so update
        // the branch's pointer map manually.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, zone())->AddMove(pred_op, cur_op, zone());
    }
  }
}


LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
}


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  HPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add a gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
            LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
            move->AddMove(prev_operand, cur_operand, zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  HPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}
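
// Liveness and live ranges are built block by block in reverse order; loop
// headers get an extra pass that keeps everything live on loop entry alive
// across the whole loop body.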
void LAllocator::BuildLiveRanges() {
  HPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of
      // the block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(phi_operand != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // Add a live range stretching from the first loop instruction to the
      // last for each value live on entry to the header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end, zone_);
        iterator.Advance();
      }

      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        PrintF("Function: %s\n",
               *chunk_->info()->function()->debug_name()->ToCString());
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }
}

bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}
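
// For every GC safe point, record where each live tagged value can be
// found: in its spill slot (once spilled), in its register, or both.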
void LAllocator::PopulatePointerMaps() {
  HPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when they
    // step backwards and reset first_safe_point_index so we don't miss any
    // safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this
    // range, recording how far we step so the next range can start here.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(zone_);
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand, zone());
      }
    }
  }
}

void LAllocator::ProcessOsrEntry() {
  const ZoneList<LInstruction*>* instrs = chunk_->instructions();

  // Linear search for the OSR entry instruction in the chunk.
  int index = -1;
  while (++index < instrs->length() &&
         !instrs->at(index)->IsOsrEntry()) {
  }
  ASSERT(index < instrs->length());
  LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));

  LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* range = live_ranges()->at(i);
    if (range != NULL) {
      if (range->Covers(position) &&
          range->HasRegisterAssigned() &&
          range->TopLevel()->HasAllocatedSpillOperand()) {
        int reg_index = range->assigned_register();
        LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
        if (range->IsDouble()) {
          instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
        } else {
          instruction->MarkSpilledRegister(reg_index, spill_operand);
        }
      }
    }
  }
}

void LAllocator::AllocateGeneralRegisters() {
  HPhase phase("L_Allocate general registers", this);
  num_registers_ = Register::kNumAllocatableRegisters;
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  HPhase phase("L_Allocate double registers", this);
  num_registers_ = DoubleRegister::kNumAllocatableRegisters;
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}
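
// The core linear-scan loop: unhandled ranges are processed in order of
// start position while the active and inactive sets are kept current with
// the advancing scan position.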
void LAllocator::AllocateRegisters() {
  ASSERT(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the active list.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the active list.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // The live range was removed from the inactive list.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // The live range was removed from the inactive list.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}

const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return Register::AllocationIndexToString(allocation_index);
  } else {
    return DoubleRegister::AllocationIndexToString(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged();
}


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}

void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      ASSERT(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  ASSERT(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}
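
// The unhandled list is kept sorted in descending start order so that
// RemoveLast() always pops the range with the smallest start position.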
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range, zone());
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}

void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


// TryAllocateFreeReg and AllocateBlockedReg assume this when sizing
// their local arrays.
STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
              Register::kNumAllocatableRegisters);
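
// Fast path: find a register that is free for the entire range (preferring
// the hinted register); failing that, take the register that stays free
// longest and split the range where it becomes blocked.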
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  UsePosition* hinted_use = current->FirstPosWithHint();
  if (hinted_use != NULL) {
    LOperand* hint = hinted_use->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc(
          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
          RegisterName(register_index),
          free_until_pos[register_index].Value(),
          current->id(),
          current->End().Value());

      // The desired register is free until the end of the current live range.
      if (free_until_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %s to live range %d\n",
                   RegisterName(register_index),
                   current->id());
        current->set_assigned_register(register_index, mode_, zone_);
        return true;
      }
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_, zone_);

  return true;
}
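
// Slow path: every register is taken at the start of the range. Pick the
// register whose next use is farthest away, then spill or split whatever
// currently occupies it.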
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a
    // register. Spill the starting part of the live range up to that use.
    ASSERT(current->Start().Value() < register_use->pos().Value());
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // The register becomes blocked before the current range ends. Split
    // before that position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_, zone_);

  // This register was not free. Find and spill the parts of active and
  // inactive live ranges that use the same register at the same positions.
  SplitAndSpillIntersecting(current);
}

void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        SpillAfter(range, split_pos);
      } else {
        SpillBetween(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
         InstructionAt(pos.InstructionIndex())->IsLabel();
}


LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if a split occurred at the end
  // of a control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  LiveRange* result = LiveRangeFor(GetVirtualRegister());
  if (!AllocationOk()) return NULL;
  range->SplitAt(pos, result, zone_);
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  ASSERT(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}
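
// Prefer split positions outside loops: hoist the split up to the header of
// the outermost loop that lies entirely inside [start, end].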
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find the header of the outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  ASSERT(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[. Split it at a position
    // between ]start+1, end[, spill the middle part and put the rest back
    // on the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        second_part->Start().InstructionEnd(),
        end.PrevInstruction().InstructionEnd());

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[. Nothing to
    // spill; put this range back on the unhandled list.
    AddToUnhandledSorted(second_part);
  }
}


void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(zone_);
}


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif