v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
lithium-allocator.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 #include "lithium-allocator-inl.h"
30 
31 #include "hydrogen.h"
32 #include "string-stream.h"
33 
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
42 #else
43 #error "Unknown architecture."
44 #endif
45 
46 namespace v8 {
47 namespace internal {
48 
49 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
50  return a.Value() < b.Value() ? a : b;
51 }
52 
53 
54 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
55  return a.Value() > b.Value() ? a : b;
56 }
57 
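// Throughout this file a LifetimePosition packs two half-positions per
// instruction: an even value at the instruction's start, an odd value at its
// end. A minimal sketch of that arithmetic, using only the LifetimePosition
// calls that appear below (the 2-per-instruction layout is an assumption
// about the header, stated here for readability):
//
//   void LifetimePositionSketch() {
//     LifetimePosition start = LifetimePosition::FromInstructionIndex(3);
//     ASSERT(start.IsInstructionStart());
//     ASSERT(start.InstructionIndex() == 3);
//     ASSERT(start.InstructionEnd().Value() == start.Value() + 1);
//     ASSERT(start.NextInstruction().InstructionIndex() == 4);
//   }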
58 
59 UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
60  : operand_(operand),
61  hint_(NULL),
62  pos_(pos),
63  next_(NULL),
64  requires_reg_(false),
65  register_beneficial_(true) {
66  if (operand_ != NULL && operand_->IsUnallocated()) {
67  LUnallocated* unalloc = LUnallocated::cast(operand_);
68  requires_reg_ = unalloc->HasRegisterPolicy();
69  register_beneficial_ = !unalloc->HasAnyPolicy();
70  }
71  ASSERT(pos_.IsValid());
72 }
73 
74 
75 bool UsePosition::HasHint() const {
76  return hint_ != NULL && !hint_->IsUnallocated();
77 }
78 
79 
80 bool UsePosition::RequiresRegister() const {
81  return requires_reg_;
82 }
83 
84 
85 bool UsePosition::RegisterIsBeneficial() const {
86  return register_beneficial_;
87 }
88 
89 
90 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
91  ASSERT(Contains(pos) && pos.Value() != start().Value());
92  UseInterval* after = new(zone) UseInterval(pos, end_);
93  after->next_ = next_;
94  next_ = after;
95  end_ = pos;
96 }
97 
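// Splitting [start, end[ at pos keeps [start, pos[ in this interval and
// chains a freshly allocated [pos, end[ behind it. A minimal sketch with
// raw position values (illustrative only):
//
//   // Given an interval covering [10, 30[ and a position pos with Value() 20:
//   interval->SplitAt(pos, zone);
//   // afterwards: interval == [10, 20[ and interval->next() == [20, 30[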
98 
99 #ifdef DEBUG
100 
101 
102 void LiveRange::Verify() const {
103  UsePosition* cur = first_pos_;
104  while (cur != NULL) {
105  ASSERT(Start().Value() <= cur->pos().Value() &&
106  cur->pos().Value() <= End().Value());
107  cur = cur->next();
108  }
109 }
110 
111 
112 bool LiveRange::HasOverlap(UseInterval* target) const {
113  UseInterval* current_interval = first_interval_;
114  while (current_interval != NULL) {
115  // Intervals overlap if the start of one is contained in the other.
116  if (current_interval->Contains(target->start()) ||
117  target->Contains(current_interval->start())) {
118  return true;
119  }
120  current_interval = current_interval->next();
121  }
122  return false;
123 }
124 
125 
126 #endif
127 
128 
129 LiveRange::LiveRange(int id, Zone* zone)
130  : id_(id),
131  spilled_(false),
132  is_double_(false),
133  assigned_register_(kInvalidAssignment),
134  last_interval_(NULL),
135  first_interval_(NULL),
136  first_pos_(NULL),
137  parent_(NULL),
138  next_(NULL),
139  current_interval_(NULL),
140  last_processed_use_(NULL),
141  spill_operand_(new(zone) LOperand()),
142  spill_start_index_(kMaxInt) { }
143 
144 
145 void LiveRange::set_assigned_register(int reg,
146  RegisterKind register_kind,
147  Zone* zone) {
148  ASSERT(!HasRegisterAssigned() && !IsSpilled());
149  assigned_register_ = reg;
150  is_double_ = (register_kind == DOUBLE_REGISTERS);
151  ConvertOperands(zone);
152 }
153 
154 
155 void LiveRange::MakeSpilled(Zone* zone) {
156  ASSERT(!IsSpilled());
157  ASSERT(TopLevel()->HasAllocatedSpillOperand());
158  spilled_ = true;
159  assigned_register_ = kInvalidAssignment;
160  ConvertOperands(zone);
161 }
162 
163 
164 bool LiveRange::HasAllocatedSpillOperand() const {
165  ASSERT(spill_operand_ != NULL);
166  return !spill_operand_->IsIgnored();
167 }
168 
169 
170 void LiveRange::SetSpillOperand(LOperand* operand) {
171  ASSERT(!operand->IsUnallocated());
172  ASSERT(spill_operand_ != NULL);
173  ASSERT(spill_operand_->IsIgnored());
174  spill_operand_->ConvertTo(operand->kind(), operand->index());
175 }
176 
177 
178 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
179  UsePosition* use_pos = last_processed_use_;
180  if (use_pos == NULL) use_pos = first_pos();
181  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
182  use_pos = use_pos->next();
183  }
184  last_processed_use_ = use_pos;
185  return use_pos;
186 }
187 
188 
189 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
190  LifetimePosition start) {
191  UsePosition* pos = NextUsePosition(start);
192  while (pos != NULL && !pos->RegisterIsBeneficial()) {
193  pos = pos->next();
194  }
195  return pos;
196 }
197 
198 
199 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
200  UsePosition* pos = NextUsePosition(start);
201  while (pos != NULL && !pos->RequiresRegister()) {
202  pos = pos->next();
203  }
204  return pos;
205 }
206 
207 
208 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
209  // TODO(kmillikin): Comment. Now.
210  if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
211 
212  // We cannot spill a live range that has a use requiring a register
213  // at the current or the immediate next position.
214  UsePosition* use_pos = NextRegisterPosition(pos);
215  if (use_pos == NULL) return true;
216  return
217  use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
218 }
219 
220 
221 UsePosition* LiveRange::FirstPosWithHint() const {
222  UsePosition* pos = first_pos_;
223  while (pos != NULL && !pos->HasHint()) pos = pos->next();
224  return pos;
225 }
226 
227 
228 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
229  LOperand* op = NULL;
230  if (HasRegisterAssigned()) {
231  ASSERT(!IsSpilled());
232  if (IsDouble()) {
233  op = LDoubleRegister::Create(assigned_register(), zone);
234  } else {
235  op = LRegister::Create(assigned_register(), zone);
236  }
237  } else if (IsSpilled()) {
238  ASSERT(!HasRegisterAssigned());
239  op = TopLevel()->GetSpillOperand();
240  ASSERT(!op->IsUnallocated());
241  } else {
242  LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
243  unalloc->set_virtual_register(id_);
244  op = unalloc;
245  }
246  return op;
247 }
248 
249 
250 UseInterval* LiveRange::FirstSearchIntervalForPosition(
251  LifetimePosition position) const {
252  if (current_interval_ == NULL) return first_interval_;
253  if (current_interval_->start().Value() > position.Value()) {
254  current_interval_ = NULL;
255  return first_interval_;
256  }
257  return current_interval_;
258 }
259 
260 
261 void LiveRange::AdvanceLastProcessedMarker(
262  UseInterval* to_start_of, LifetimePosition but_not_past) const {
263  if (to_start_of == NULL) return;
264  if (to_start_of->start().Value() > but_not_past.Value()) return;
265  LifetimePosition start =
266  current_interval_ == NULL ? LifetimePosition::Invalid()
267  : current_interval_->start();
268  if (to_start_of->start().Value() > start.Value()) {
269  current_interval_ = to_start_of;
270  }
271 }
272 
273 
274 void LiveRange::SplitAt(LifetimePosition position,
275  LiveRange* result,
276  Zone* zone) {
277  ASSERT(Start().Value() < position.Value());
278  ASSERT(result->IsEmpty());
279  // Find the last interval that ends before the position. If the
280  // position is contained in one of the intervals in the chain, we
281  // split that interval and use the first part.
282  UseInterval* current = FirstSearchIntervalForPosition(position);
283 
284  // If the split position coincides with the beginning of a use interval
285  // we need to split use positions in a special way.
286  bool split_at_start = false;
287 
288  if (current->start().Value() == position.Value()) {
289  // When splitting at start we need to locate the previous use interval.
290  current = first_interval_;
291  }
292 
293  while (current != NULL) {
294  if (current->Contains(position)) {
295  current->SplitAt(position, zone);
296  break;
297  }
298  UseInterval* next = current->next();
299  if (next->start().Value() >= position.Value()) {
300  split_at_start = (next->start().Value() == position.Value());
301  break;
302  }
303  current = next;
304  }
305 
306  // Partition original use intervals to the two live ranges.
307  UseInterval* before = current;
308  UseInterval* after = before->next();
309  result->last_interval_ = (last_interval_ == before)
310  ? after // Only interval in the range after split.
311  : last_interval_; // Last interval of the original range.
312  result->first_interval_ = after;
313  last_interval_ = before;
314 
315  // Find the last use position before the split and the first use
316  // position after it.
317  UsePosition* use_after = first_pos_;
318  UsePosition* use_before = NULL;
319  if (split_at_start) {
320  // The split position coincides with the beginning of a use interval (the
321  // end of a lifetime hole). Use at this position should be attributed to
322  // the split child because split child owns use interval covering it.
323  while (use_after != NULL && use_after->pos().Value() < position.Value()) {
324  use_before = use_after;
325  use_after = use_after->next();
326  }
327  } else {
328  while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
329  use_before = use_after;
330  use_after = use_after->next();
331  }
332  }
333 
334  // Partition original use positions to the two live ranges.
335  if (use_before != NULL) {
336  use_before->next_ = NULL;
337  } else {
338  first_pos_ = NULL;
339  }
340  result->first_pos_ = use_after;
341 
342  // Discard cached iteration state. It might be pointing
343  // to the use that no longer belongs to this live range.
344  last_processed_use_ = NULL;
345  current_interval_ = NULL;
346 
347  // Link the new live range in the chain before any of the other
348  // ranges linked from the range before the split.
349  result->parent_ = (parent_ == NULL) ? this : parent_;
350  result->next_ = next_;
351  next_ = result;
352 
353 #ifdef DEBUG
354  Verify();
355  result->Verify();
356 #endif
357 }
358 
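// A range split this way forms a chain: the receiver keeps the part before
// `position`, `result` owns the part after it, and both report the same
// TopLevel(). A minimal usage sketch (new_id is an illustrative fresh id):
//
//   LiveRange* child = new(zone) LiveRange(new_id, zone);
//   range->SplitAt(split_pos, child, zone);
//   ASSERT(child->TopLevel() == range->TopLevel());
//   ASSERT(range->End().Value() <= child->Start().Value());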
359 
360 // This implements an ordering on live ranges so that they are ordered by their
361 // start positions. This is needed for the correctness of the register
362 // allocation algorithm. If two live ranges start at the same offset then there
363 // is a tie breaker based on where the value is first used. This part of the
364 // ordering is merely a heuristic.
365 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
366  LifetimePosition start = Start();
367  LifetimePosition other_start = other->Start();
368  if (start.Value() == other_start.Value()) {
369  UsePosition* pos = FirstPosWithHint();
370  if (pos == NULL) return false;
371  UsePosition* other_pos = other->first_pos();
372  if (other_pos == NULL) return true;
373  return pos->pos().Value() < other_pos->pos().Value();
374  }
375  return start.Value() < other_start.Value();
376 }
377 
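// Example: ranges A = [10, 40[ and B = [10, 25[ start together; if A's first
// hinted use is at position 12 and B's first use is at 18, then
// A->ShouldBeAllocatedBefore(B) holds via the tie breaker. (Sketch values.)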
378 
379 void LiveRange::ShortenTo(LifetimePosition start) {
380  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
381  ASSERT(first_interval_ != NULL);
382  ASSERT(first_interval_->start().Value() <= start.Value());
383  ASSERT(start.Value() < first_interval_->end().Value());
384  first_interval_->set_start(start);
385 }
386 
387 
388 void LiveRange::EnsureInterval(LifetimePosition start,
389  LifetimePosition end,
390  Zone* zone) {
391  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
392  id_,
393  start.Value(),
394  end.Value());
395  LifetimePosition new_end = end;
396  while (first_interval_ != NULL &&
397  first_interval_->start().Value() <= end.Value()) {
398  if (first_interval_->end().Value() > end.Value()) {
399  new_end = first_interval_->end();
400  }
401  first_interval_ = first_interval_->next();
402  }
403 
404  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
405  new_interval->next_ = first_interval_;
406  first_interval_ = new_interval;
407  if (new_interval->next() == NULL) {
408  last_interval_ = new_interval;
409  }
410 }
411 
412 
413 void LiveRange::AddUseInterval(LifetimePosition start,
414  LifetimePosition end,
415  Zone* zone) {
416  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
417  id_,
418  start.Value(),
419  end.Value());
420  if (first_interval_ == NULL) {
421  UseInterval* interval = new(zone) UseInterval(start, end);
422  first_interval_ = interval;
423  last_interval_ = interval;
424  } else {
425  if (end.Value() == first_interval_->start().Value()) {
426  first_interval_->set_start(start);
427  } else if (end.Value() < first_interval_->start().Value()) {
428  UseInterval* interval = new(zone) UseInterval(start, end);
429  interval->set_next(first_interval_);
430  first_interval_ = interval;
431  } else {
432  // The order of instruction processing (see ProcessInstructions)
433  // guarantees that each new use interval either precedes or intersects
434  // with the last added interval.
435  ASSERT(start.Value() < first_interval_->end().Value());
436  first_interval_->start_ = Min(start, first_interval_->start_);
437  first_interval_->end_ = Max(end, first_interval_->end_);
438  }
439  }
440 }
441 
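// ProcessInstructions visits each block backwards, so starts arrive in
// non-increasing order and only the front of the interval list is touched.
// With first interval [20, 30[ (illustrative raw values):
//   AddUseInterval(10, 20)  merges the abutting interval into [10, 30[,
//   AddUseInterval(5, 15)   links a separate [5, 15[ in front,
//   AddUseInterval(15, 25)  widens the front interval to [15, 30[.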
442 
443 UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
444  LOperand* operand,
445  Zone* zone) {
446  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
447  id_,
448  pos.Value());
449  UsePosition* use_pos = new(zone) UsePosition(pos, operand);
450  UsePosition* prev = NULL;
451  UsePosition* current = first_pos_;
452  while (current != NULL && current->pos().Value() < pos.Value()) {
453  prev = current;
454  current = current->next();
455  }
456 
457  if (prev == NULL) {
458  use_pos->set_next(first_pos_);
459  first_pos_ = use_pos;
460  } else {
461  use_pos->next_ = prev->next_;
462  prev->next_ = use_pos;
463  }
464 
465  return use_pos;
466 }
467 
468 
469 void LiveRange::ConvertOperands(Zone* zone) {
470  LOperand* op = CreateAssignedOperand(zone);
471  UsePosition* use_pos = first_pos();
472  while (use_pos != NULL) {
473  ASSERT(Start().Value() <= use_pos->pos().Value() &&
474  use_pos->pos().Value() <= End().Value());
475 
476  if (use_pos->HasOperand()) {
477  ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
478  !use_pos->RequiresRegister());
479  use_pos->operand()->ConvertTo(op->kind(), op->index());
480  }
481  use_pos = use_pos->next();
482  }
483 }
484 
485 
486 bool LiveRange::CanCover(LifetimePosition position) const {
487  if (IsEmpty()) return false;
488  return Start().Value() <= position.Value() &&
489  position.Value() < End().Value();
490 }
491 
492 
493 bool LiveRange::Covers(LifetimePosition position) const {
494  if (!CanCover(position)) return false;
495  UseInterval* start_search = FirstSearchIntervalForPosition(position);
496  for (UseInterval* interval = start_search;
497  interval != NULL;
498  interval = interval->next()) {
499  ASSERT(interval->next() == NULL ||
500  interval->next()->start().Value() >= interval->start().Value());
501  AdvanceLastProcessedMarker(interval, position);
502  if (interval->Contains(position)) return true;
503  if (interval->start().Value() > position.Value()) return false;
504  }
505  return false;
506 }
507 
508 
509 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
510  UseInterval* b = other->first_interval();
511  if (b == NULL) return LifetimePosition::Invalid();
512  LifetimePosition advance_last_processed_up_to = b->start();
513  UseInterval* a = FirstSearchIntervalForPosition(b->start());
514  while (a != NULL && b != NULL) {
515  if (a->start().Value() > other->End().Value()) break;
516  if (b->start().Value() > End().Value()) break;
517  LifetimePosition cur_intersection = a->Intersect(b);
518  if (cur_intersection.IsValid()) {
519  return cur_intersection;
520  }
521  if (a->start().Value() < b->start().Value()) {
522  a = a->next();
523  if (a == NULL || a->start().Value() > other->End().Value()) break;
524  AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
525  } else {
526  b = b->next();
527  }
528  }
529  return LifetimePosition::Invalid();
530 }
531 
532 
533 LAllocator::LAllocator(int num_values, HGraph* graph)
534  : zone_(graph->zone()),
535  chunk_(NULL),
536  live_in_sets_(graph->blocks()->length(), zone_),
537  live_ranges_(num_values * 2, zone_),
538  fixed_live_ranges_(NULL),
539  fixed_double_live_ranges_(NULL),
540  unhandled_live_ranges_(num_values * 2, zone_),
541  active_live_ranges_(8, zone_),
542  inactive_live_ranges_(8, zone_),
543  reusable_slots_(8, zone_),
544  next_virtual_register_(num_values),
545  first_artificial_register_(num_values),
546  mode_(GENERAL_REGISTERS),
547  num_registers_(-1),
548  graph_(graph),
549  has_osr_entry_(false),
550  allocation_ok_(true) { }
551 
552 
553 void LAllocator::InitializeLivenessAnalysis() {
554  // Initialize the live_in sets for each block to NULL.
555  int block_count = graph_->blocks()->length();
556  live_in_sets_.Initialize(block_count, zone());
557  live_in_sets_.AddBlock(NULL, block_count, zone());
558 }
559 
560 
561 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
562  // Compute live out for the given block, not including contributions
563  // from backward successor edges.
564  BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_);
565 
566  // Process all successor blocks.
567  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
568  // Add values live on entry to the successor. Note the successor's
569  // live_in will not be computed yet for backwards edges.
570  HBasicBlock* successor = it.Current();
571  BitVector* live_in = live_in_sets_[successor->block_id()];
572  if (live_in != NULL) live_out->Union(*live_in);
573 
574  // All phi input operands corresponding to this successor edge are live
575  // out from this block.
576  int index = successor->PredecessorIndexOf(block);
577  const ZoneList<HPhi*>* phis = successor->phis();
578  for (int i = 0; i < phis->length(); ++i) {
579  HPhi* phi = phis->at(i);
580  if (!phi->OperandAt(index)->IsConstant()) {
581  live_out->Add(phi->OperandAt(index)->id());
582  }
583  }
584  }
585 
586  return live_out;
587 }
588 
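// This is the standard backward dataflow union, with each phi input
// attributed to its incoming edge rather than to the phi's own block;
// schematically:
//
//   live_out(B) = union over successors S of B of
//       ( live_in(S) + { phi.OperandAt(index of B among S's predecessors) } )
//
// For back edges live_in(S) is still NULL on this pass and contributes
// nothing; BuildLiveRanges patches loops up afterwards.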
589 
590 void LAllocator::AddInitialIntervals(HBasicBlock* block,
591  BitVector* live_out) {
592  // Add an interval that includes the entire block to the live range for
593  // each live_out value.
594  LifetimePosition start = LifetimePosition::FromInstructionIndex(
595  block->first_instruction_index());
596  LifetimePosition end = LifetimePosition::FromInstructionIndex(
597  block->last_instruction_index()).NextInstruction();
598  BitVector::Iterator iterator(live_out);
599  while (!iterator.Done()) {
600  int operand_index = iterator.Current();
601  LiveRange* range = LiveRangeFor(operand_index);
602  range->AddUseInterval(start, end, zone_);
603  iterator.Advance();
604  }
605 }
606 
607 
608 int LAllocator::FixedDoubleLiveRangeID(int index) {
609  return -index - 1 - Register::kNumAllocatableRegisters;
610 }
611 
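// Fixed ranges use negative ids so they can never collide with a virtual
// register number: presumably general-purpose register i maps to -i - 1 (its
// FixedLiveRangeID lives in the header), and double register i is shifted
// past that block by Register::kNumAllocatableRegisters, as computed above.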
612 
613 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
614  int pos,
615  bool is_tagged) {
616  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
617  ASSERT(operand->HasFixedPolicy());
618  if (operand->policy() == LUnallocated::FIXED_SLOT) {
619  operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
620  } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
621  int reg_index = operand->fixed_index();
622  operand->ConvertTo(LOperand::REGISTER, reg_index);
623  } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
624  int reg_index = operand->fixed_index();
625  operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
626  } else {
627  UNREACHABLE();
628  }
629  if (is_tagged) {
630  TraceAlloc("Fixed reg is tagged at %d\n", pos);
631  LInstruction* instr = InstructionAt(pos);
632  if (instr->HasPointerMap()) {
633  instr->pointer_map()->RecordPointer(operand, zone());
634  }
635  }
636  return operand;
637 }
638 
639 
640 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
641  ASSERT(index < Register::kNumAllocatableRegisters);
642  LiveRange* result = fixed_live_ranges_[index];
643  if (result == NULL) {
644  result = new(zone_) LiveRange(FixedLiveRangeID(index), zone_);
645  ASSERT(result->IsFixed());
646  result->set_assigned_register(index, GENERAL_REGISTERS, zone_);
647  fixed_live_ranges_[index] = result;
648  }
649  return result;
650 }
651 
652 
653 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
654  ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
655  LiveRange* result = fixed_double_live_ranges_[index];
656  if (result == NULL) {
657  result = new(zone_) LiveRange(FixedDoubleLiveRangeID(index), zone_);
658  ASSERT(result->IsFixed());
659  result->set_assigned_register(index, DOUBLE_REGISTERS, zone_);
660  fixed_double_live_ranges_[index] = result;
661  }
662  return result;
663 }
664 
665 
666 LiveRange* LAllocator::LiveRangeFor(int index) {
667  if (index >= live_ranges_.length()) {
668  live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
669  }
670  LiveRange* result = live_ranges_[index];
671  if (result == NULL) {
672  result = new(zone_) LiveRange(index, zone_);
673  live_ranges_[index] = result;
674  }
675  return result;
676 }
677 
678 
679 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
680  int last_instruction = block->last_instruction_index();
681  int index = chunk_->NearestGapPos(last_instruction);
682  return GapAt(index);
683 }
684 
685 
686 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
687  if (!operand->IsUnallocated()) return NULL;
688  int index = LUnallocated::cast(operand)->virtual_register();
689  HValue* instr = graph_->LookupValue(index);
690  if (instr != NULL && instr->IsPhi()) {
691  return HPhi::cast(instr);
692  }
693  return NULL;
694 }
695 
696 
697 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
698  if (operand->IsUnallocated()) {
699  return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
700  } else if (operand->IsRegister()) {
701  return FixedLiveRangeFor(operand->index());
702  } else if (operand->IsDoubleRegister()) {
703  return FixedDoubleLiveRangeFor(operand->index());
704  } else {
705  return NULL;
706  }
707 }
708 
709 
710 void LAllocator::Define(LifetimePosition position,
711  LOperand* operand,
712  LOperand* hint) {
713  LiveRange* range = LiveRangeFor(operand);
714  if (range == NULL) return;
715 
716  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
717  // Can happen if there is a definition without use.
718  range->AddUseInterval(position, position.NextInstruction(), zone_);
719  range->AddUsePosition(position.NextInstruction(), NULL, zone_);
720  } else {
721  range->ShortenTo(position);
722  }
723 
724  if (operand->IsUnallocated()) {
725  LUnallocated* unalloc_operand = LUnallocated::cast(operand);
726  range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
727  }
728 }
729 
730 
731 void LAllocator::Use(LifetimePosition block_start,
732  LifetimePosition position,
733  LOperand* operand,
734  LOperand* hint) {
735  LiveRange* range = LiveRangeFor(operand);
736  if (range == NULL) return;
737  if (operand->IsUnallocated()) {
738  LUnallocated* unalloc_operand = LUnallocated::cast(operand);
739  range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
740  }
741  range->AddUseInterval(block_start, position, zone_);
742 }
743 
744 
745 void LAllocator::AddConstraintsGapMove(int index,
746  LOperand* from,
747  LOperand* to) {
748  LGap* gap = GapAt(index);
749  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
750  if (from->IsUnallocated()) {
751  const ZoneList<LMoveOperands>* move_operands = move->move_operands();
752  for (int i = 0; i < move_operands->length(); ++i) {
753  LMoveOperands cur = move_operands->at(i);
754  LOperand* cur_to = cur.destination();
755  if (cur_to->IsUnallocated()) {
756  if (LUnallocated::cast(cur_to)->virtual_register() ==
757  LUnallocated::cast(from)->virtual_register()) {
758  move->AddMove(cur.source(), to, zone());
759  return;
760  }
761  }
762  }
763  }
764  move->AddMove(from, to, zone());
765 }
766 
767 
768 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
769  int start = block->first_instruction_index();
770  int end = block->last_instruction_index();
771  for (int i = start; i <= end; ++i) {
772  if (IsGapAt(i)) {
773  LInstruction* instr = NULL;
774  LInstruction* prev_instr = NULL;
775  if (i < end) instr = InstructionAt(i + 1);
776  if (i > start) prev_instr = InstructionAt(i - 1);
777  MeetConstraintsBetween(prev_instr, instr, i);
778  if (!AllocationOk()) return;
779  }
780  }
781 }
782 
783 
784 void LAllocator::MeetConstraintsBetween(LInstruction* first,
785  LInstruction* second,
786  int gap_index) {
787  // Handle fixed temporaries.
788  if (first != NULL) {
789  for (TempIterator it(first); !it.Done(); it.Advance()) {
790  LUnallocated* temp = LUnallocated::cast(it.Current());
791  if (temp->HasFixedPolicy()) {
792  AllocateFixed(temp, gap_index - 1, false);
793  }
794  }
795  }
796 
797  // Handle fixed output operand.
798  if (first != NULL && first->Output() != NULL) {
799  LUnallocated* first_output = LUnallocated::cast(first->Output());
800  LiveRange* range = LiveRangeFor(first_output->virtual_register());
801  bool assigned = false;
802  if (first_output->HasFixedPolicy()) {
803  LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
804  bool is_tagged = HasTaggedValue(first_output->virtual_register());
805  AllocateFixed(first_output, gap_index, is_tagged);
806 
807  // This value is produced on the stack; we never need to spill it.
808  if (first_output->IsStackSlot()) {
809  range->SetSpillOperand(first_output);
810  range->SetSpillStartIndex(gap_index - 1);
811  assigned = true;
812  }
813  chunk_->AddGapMove(gap_index, first_output, output_copy);
814  }
815 
816  if (!assigned) {
817  range->SetSpillStartIndex(gap_index);
818 
819  // This move to spill operand is not a real use. Liveness analysis
820  // and splitting of live ranges do not account for it.
821  // Thus it should be inserted to a lifetime position corresponding to
822  // the instruction end.
823  LGap* gap = GapAt(gap_index);
824  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
825  move->AddMove(first_output, range->GetSpillOperand(), zone());
826  }
827  }
828 
829  // Handle fixed input operands of second instruction.
830  if (second != NULL) {
831  for (UseIterator it(second); !it.Done(); it.Advance()) {
832  LUnallocated* cur_input = LUnallocated::cast(it.Current());
833  if (cur_input->HasFixedPolicy()) {
834  LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
835  bool is_tagged = HasTaggedValue(cur_input->virtual_register());
836  AllocateFixed(cur_input, gap_index + 1, is_tagged);
837  AddConstraintsGapMove(gap_index, input_copy, cur_input);
838  } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
839  // The live range of writable input registers always goes until the end
840  // of the instruction.
841  ASSERT(!cur_input->IsUsedAtStart());
842 
843  LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
844  cur_input->set_virtual_register(GetVirtualRegister());
845  if (!AllocationOk()) return;
846 
847  if (RequiredRegisterKind(input_copy->virtual_register()) ==
848  DOUBLE_REGISTERS) {
849  double_artificial_registers_.Add(
850  cur_input->virtual_register() - first_artificial_register_,
851  zone_);
852  }
853 
854  AddConstraintsGapMove(gap_index, input_copy, cur_input);
855  }
856  }
857  }
858 
859  // Handle "output same as input" for second instruction.
860  if (second != NULL && second->Output() != NULL) {
861  LUnallocated* second_output = LUnallocated::cast(second->Output());
862  if (second_output->HasSameAsInputPolicy()) {
863  LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
864  int output_vreg = second_output->virtual_register();
865  int input_vreg = cur_input->virtual_register();
866 
867  LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
868  cur_input->set_virtual_register(second_output->virtual_register());
869  AddConstraintsGapMove(gap_index, input_copy, cur_input);
870 
871  if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
872  int index = gap_index + 1;
873  LInstruction* instr = InstructionAt(index);
874  if (instr->HasPointerMap()) {
875  instr->pointer_map()->RecordPointer(input_copy, zone());
876  }
877  } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
878  // The input is assumed to immediately have a tagged representation,
879  // before the pointer map can be used. I.e. the pointer map at the
880  // instruction will include the output operand (whose value at the
881  // beginning of the instruction is equal to the input operand). If
882  // this is not desired, then the pointer map at this instruction needs
883  // to be adjusted manually.
884  }
885  }
886  }
887 }
888 
889 
890 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
891  int block_start = block->first_instruction_index();
892  int index = block->last_instruction_index();
893 
894  LifetimePosition block_start_position =
895  LifetimePosition::FromInstructionIndex(block_start);
896 
897  while (index >= block_start) {
898  LifetimePosition curr_position =
899  LifetimePosition::FromInstructionIndex(index);
900 
901  if (IsGapAt(index)) {
902  // We have a gap at this position.
903  LGap* gap = GapAt(index);
904  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
905  const ZoneList<LMoveOperands>* move_operands = move->move_operands();
906  for (int i = 0; i < move_operands->length(); ++i) {
907  LMoveOperands* cur = &move_operands->at(i);
908  if (cur->IsIgnored()) continue;
909  LOperand* from = cur->source();
910  LOperand* to = cur->destination();
911  HPhi* phi = LookupPhi(to);
912  LOperand* hint = to;
913  if (phi != NULL) {
914  // This is a phi resolving move.
915  if (!phi->block()->IsLoopHeader()) {
916  hint = LiveRangeFor(phi->id())->FirstHint();
917  }
918  } else {
919  if (to->IsUnallocated()) {
920  if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
921  Define(curr_position, to, from);
922  live->Remove(LUnallocated::cast(to)->virtual_register());
923  } else {
924  cur->Eliminate();
925  continue;
926  }
927  } else {
928  Define(curr_position, to, from);
929  }
930  }
931  Use(block_start_position, curr_position, from, hint);
932  if (from->IsUnallocated()) {
933  live->Add(LUnallocated::cast(from)->virtual_register());
934  }
935  }
936  } else {
937  ASSERT(!IsGapAt(index));
938  LInstruction* instr = InstructionAt(index);
939 
940  if (instr != NULL) {
941  LOperand* output = instr->Output();
942  if (output != NULL) {
943  if (output->IsUnallocated()) {
944  live->Remove(LUnallocated::cast(output)->virtual_register());
945  }
946  Define(curr_position, output, NULL);
947  }
948 
949  if (instr->IsMarkedAsCall()) {
950  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
951  if (output == NULL || !output->IsRegister() ||
952  output->index() != i) {
953  LiveRange* range = FixedLiveRangeFor(i);
954  range->AddUseInterval(curr_position,
955  curr_position.InstructionEnd(),
956  zone_);
957  }
958  }
959  }
960 
961  if (instr->IsMarkedAsCall()) {
962  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
963  if (output == NULL || !output->IsDoubleRegister() ||
964  output->index() != i) {
965  LiveRange* range = FixedDoubleLiveRangeFor(i);
966  range->AddUseInterval(curr_position,
967  curr_position.InstructionEnd(),
968  zone_);
969  }
970  }
971  }
972 
973  for (UseIterator it(instr); !it.Done(); it.Advance()) {
974  LOperand* input = it.Current();
975 
976  LifetimePosition use_pos;
977  if (input->IsUnallocated() &&
978  LUnallocated::cast(input)->IsUsedAtStart()) {
979  use_pos = curr_position;
980  } else {
981  use_pos = curr_position.InstructionEnd();
982  }
983 
984  Use(block_start_position, use_pos, input, NULL);
985  if (input->IsUnallocated()) {
986  live->Add(LUnallocated::cast(input)->virtual_register());
987  }
988  }
989 
990  for (TempIterator it(instr); !it.Done(); it.Advance()) {
991  LOperand* temp = it.Current();
992  if (instr->IsMarkedAsCall()) {
993  if (temp->IsRegister()) continue;
994  if (temp->IsUnallocated()) {
995  LUnallocated* temp_unalloc = LUnallocated::cast(temp);
996  if (temp_unalloc->HasFixedPolicy()) {
997  continue;
998  }
999  }
1000  }
1001  Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1002  Define(curr_position, temp, NULL);
1003  }
1004  }
1005  }
1006 
1007  index = index - 1;
1008  }
1009 }
1010 
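// Together with AddInitialIntervals this is a standard backward liveness
// scan: `live` starts as live_out, each definition is removed and each use
// is added while the instructions are walked last-to-first. A rough summary
// (def/uses stand in for the Define/Use plumbing above):
//
//   BitVector* live = ComputeLiveOut(block);
//   for (int i = last; i >= first; --i) {
//     if (def(i) != NULL) live->Remove(def(i));
//     for each operand u in uses(i): live->Add(u);
//   }
//   // the caller, BuildLiveRanges, stores `live` as this block's live_in.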
1011 
1012 void LAllocator::ResolvePhis(HBasicBlock* block) {
1013  const ZoneList<HPhi*>* phis = block->phis();
1014  for (int i = 0; i < phis->length(); ++i) {
1015  HPhi* phi = phis->at(i);
1016  LUnallocated* phi_operand = new(zone_) LUnallocated(LUnallocated::NONE);
1017  phi_operand->set_virtual_register(phi->id());
1018  for (int j = 0; j < phi->OperandCount(); ++j) {
1019  HValue* op = phi->OperandAt(j);
1020  LOperand* operand = NULL;
1021  if (op->IsConstant() && op->EmitAtUses()) {
1022  HConstant* constant = HConstant::cast(op);
1023  operand = chunk_->DefineConstantOperand(constant);
1024  } else {
1025  ASSERT(!op->EmitAtUses());
1026  LUnallocated* unalloc = new(zone_) LUnallocated(LUnallocated::ANY);
1027  unalloc->set_virtual_register(op->id());
1028  operand = unalloc;
1029  }
1030  HBasicBlock* cur_block = block->predecessors()->at(j);
1031  // The gap move must be added without the special processing done in
1032  // AddConstraintsGapMove.
1033  chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1034  operand,
1035  phi_operand);
1036 
1037  // We are going to insert a move before the branch instruction.
1038  // Some branch instructions (e.g. loops' back edges)
1039  // can potentially cause a GC so they have a pointer map.
1040  // By inserting a move we essentially create a copy of a
1041  // value which is invisible to PopulatePointerMaps(), because we store
1042  // it into a location different from the operand of a live range
1043  // covering a branch instruction.
1044  // Thus we need to manually record a pointer.
1045  LInstruction* branch =
1046  InstructionAt(cur_block->last_instruction_index());
1047  if (branch->HasPointerMap()) {
1048  if (phi->representation().IsTagged()) {
1049  branch->pointer_map()->RecordPointer(phi_operand, zone());
1050  } else if (!phi->representation().IsDouble()) {
1051  branch->pointer_map()->RecordUntagged(phi_operand, zone());
1052  }
1053  }
1054  }
1055 
1056  LiveRange* live_range = LiveRangeFor(phi->id());
1057  LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1058  label->GetOrCreateParallelMove(LGap::START, zone())->
1059  AddMove(phi_operand, live_range->GetSpillOperand(), zone());
1060  live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1061  }
1062 }
1063 
1064 
1065 bool LAllocator::Allocate(LChunk* chunk) {
1066  ASSERT(chunk_ == NULL);
1067  chunk_ = static_cast<LPlatformChunk*>(chunk);
1068  MeetRegisterConstraints();
1069  if (!AllocationOk()) return false;
1070  ResolvePhis();
1071  BuildLiveRanges();
1072  AllocateGeneralRegisters();
1073  if (!AllocationOk()) return false;
1074  AllocateDoubleRegisters();
1075  if (!AllocationOk()) return false;
1076  PopulatePointerMaps();
1077  if (has_osr_entry_) ProcessOsrEntry();
1078  ConnectRanges();
1079  ResolveControlFlow();
1080  return true;
1081 }
1082 
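// A sketch of the driver, modeled on LChunk::NewChunk in lithium.cc (names
// assumed from that file; a failed allocation there simply yields NULL and
// the compiler bails out to unoptimized code):
//
//   int values = graph->GetMaximumValueID();
//   LAllocator allocator(values, graph);
//   LChunkBuilder builder(info, graph, &allocator);
//   LChunk* chunk = builder.Build();
//   if (chunk == NULL || !allocator.Allocate(chunk)) return NULL;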
1083 
1084 void LAllocator::MeetRegisterConstraints() {
1085  HPhase phase("L_Register constraints", chunk_);
1086  first_artificial_register_ = next_virtual_register_;
1087  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1088  for (int i = 0; i < blocks->length(); ++i) {
1089  HBasicBlock* block = blocks->at(i);
1090  MeetRegisterConstraints(block);
1091  if (!AllocationOk()) return;
1092  }
1093 }
1094 
1095 
1096 void LAllocator::ResolvePhis() {
1097  HPhase phase("L_Resolve phis", chunk_);
1098 
1099  // Process the blocks in reverse order.
1100  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1101  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1102  HBasicBlock* block = blocks->at(block_id);
1103  ResolvePhis(block);
1104  }
1105 }
1106 
1107 
1108 void LAllocator::ResolveControlFlow(LiveRange* range,
1109  HBasicBlock* block,
1110  HBasicBlock* pred) {
1111  LifetimePosition pred_end =
1112  LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1113  LifetimePosition cur_start =
1114  LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1115  LiveRange* pred_cover = NULL;
1116  LiveRange* cur_cover = NULL;
1117  LiveRange* cur_range = range;
1118  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1119  if (cur_range->CanCover(cur_start)) {
1120  ASSERT(cur_cover == NULL);
1121  cur_cover = cur_range;
1122  }
1123  if (cur_range->CanCover(pred_end)) {
1124  ASSERT(pred_cover == NULL);
1125  pred_cover = cur_range;
1126  }
1127  cur_range = cur_range->next();
1128  }
1129 
1130  if (cur_cover->IsSpilled()) return;
1131  ASSERT(pred_cover != NULL && cur_cover != NULL);
1132  if (pred_cover != cur_cover) {
1133  LOperand* pred_op = pred_cover->CreateAssignedOperand(zone_);
1134  LOperand* cur_op = cur_cover->CreateAssignedOperand(zone_);
1135  if (!pred_op->Equals(cur_op)) {
1136  LGap* gap = NULL;
1137  if (block->predecessors()->length() == 1) {
1138  gap = GapAt(block->first_instruction_index());
1139  } else {
1140  ASSERT(pred->end()->SecondSuccessor() == NULL);
1141  gap = GetLastGap(pred);
1142 
1143  // We are going to insert a move before the branch instruction.
1144  // Some branch instructions (e.g. loops' back edges)
1145  // can potentially cause a GC so they have a pointer map.
1146  // By inserting a move we essentially create a copy of a
1147  // value which is invisible to PopulatePointerMaps(), because we store
1148  // it into a location different from the operand of a live range
1149  // covering a branch instruction.
1150  // Thus we need to manually record a pointer.
1151  LInstruction* branch = InstructionAt(pred->last_instruction_index());
1152  if (branch->HasPointerMap()) {
1153  if (HasTaggedValue(range->id())) {
1154  branch->pointer_map()->RecordPointer(cur_op, zone());
1155  } else if (!cur_op->IsDoubleStackSlot() &&
1156  !cur_op->IsDoubleRegister()) {
1157  branch->pointer_map()->RemovePointer(cur_op);
1158  }
1159  }
1160  }
1161  gap->GetOrCreateParallelMove(
1162  LGap::START, zone())->AddMove(pred_op, cur_op, zone());
1163  }
1164  }
1165 }
1166 
1167 
1168 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1169  int index = pos.InstructionIndex();
1170  if (IsGapAt(index)) {
1171  LGap* gap = GapAt(index);
1172  return gap->GetOrCreateParallelMove(
1173  pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
1174  }
1175  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1176  return GapAt(gap_pos)->GetOrCreateParallelMove(
1177  (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
1178 }
1179 
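// Every code position thus resolves to one of four parallel-move slots
// around an instruction: the START or END move of a gap, or the BEFORE /
// AFTER move of the neighboring gap when the position names a real
// instruction. E.g. the end position of a non-gap instruction at index i
// lands in gap i + 1's BEFORE move.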
1180 
1181 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1182  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1183  return gap->block();
1184 }
1185 
1186 
1187 void LAllocator::ConnectRanges() {
1188  HPhase phase("L_Connect ranges", this);
1189  for (int i = 0; i < live_ranges()->length(); ++i) {
1190  LiveRange* first_range = live_ranges()->at(i);
1191  if (first_range == NULL || first_range->parent() != NULL) continue;
1192 
1193  LiveRange* second_range = first_range->next();
1194  while (second_range != NULL) {
1195  LifetimePosition pos = second_range->Start();
1196 
1197  if (!second_range->IsSpilled()) {
1198  // Add gap move if the two live ranges touch and there is no block
1199  // boundary.
1200  if (first_range->End().Value() == pos.Value()) {
1201  bool should_insert = true;
1202  if (IsBlockBoundary(pos)) {
1203  should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1204  }
1205  if (should_insert) {
1206  LParallelMove* move = GetConnectingParallelMove(pos);
1207  LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
1208  LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
1209  move->AddMove(prev_operand, cur_operand, zone());
1210  }
1211  }
1212  }
1213 
1214  first_range = second_range;
1215  second_range = second_range->next();
1216  }
1217  }
1218 }
1219 
1220 
1221 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1222  if (block->predecessors()->length() != 1) return false;
1223  return block->predecessors()->first()->block_id() == block->block_id() - 1;
1224 }
1225 
1226 
1227 void LAllocator::ResolveControlFlow() {
1228  HPhase phase("L_Resolve control flow", this);
1229  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1230  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1231  HBasicBlock* block = blocks->at(block_id);
1232  if (CanEagerlyResolveControlFlow(block)) continue;
1233  BitVector* live = live_in_sets_[block->block_id()];
1234  BitVector::Iterator iterator(live);
1235  while (!iterator.Done()) {
1236  int operand_index = iterator.Current();
1237  for (int i = 0; i < block->predecessors()->length(); ++i) {
1238  HBasicBlock* cur = block->predecessors()->at(i);
1239  LiveRange* cur_range = LiveRangeFor(operand_index);
1240  ResolveControlFlow(cur_range, block, cur);
1241  }
1242  iterator.Advance();
1243  }
1244  }
1245 }
1246 
1247 
1248 void LAllocator::BuildLiveRanges() {
1249  HPhase phase("L_Build live ranges", this);
1250  InitializeLivenessAnalysis();
1251  // Process the blocks in reverse order.
1252  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1253  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1254  HBasicBlock* block = blocks->at(block_id);
1255  BitVector* live = ComputeLiveOut(block);
1256  // Initially consider all live_out values live for the entire block. We
1257  // will shorten these intervals if necessary.
1258  AddInitialIntervals(block, live);
1259 
1260  // Process the instructions in reverse order, generating and killing
1261  // live values.
1262  ProcessInstructions(block, live);
1263  // All phi output operands are killed by this block.
1264  const ZoneList<HPhi*>* phis = block->phis();
1265  for (int i = 0; i < phis->length(); ++i) {
1266  // The live range interval already ends at the first instruction of the
1267  // block.
1268  HPhi* phi = phis->at(i);
1269  live->Remove(phi->id());
1270 
1271  LOperand* hint = NULL;
1272  LOperand* phi_operand = NULL;
1273  LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1274  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
1275  for (int j = 0; j < move->move_operands()->length(); ++j) {
1276  LOperand* to = move->move_operands()->at(j).destination();
1277  if (to->IsUnallocated() &&
1278  LUnallocated::cast(to)->virtual_register() == phi->id()) {
1279  hint = move->move_operands()->at(j).source();
1280  phi_operand = to;
1281  break;
1282  }
1283  }
1284  ASSERT(hint != NULL);
1285 
1286  LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1287  block->first_instruction_index());
1288  Define(block_start, phi_operand, hint);
1289  }
1290 
1291  // Now live is live_in for this block except not including values live
1292  // out on backward successor edges.
1293  live_in_sets_[block_id] = live;
1294 
1295  // If this block is a loop header go back and patch up the necessary
1296  // predecessor blocks.
1297  if (block->IsLoopHeader()) {
1298  // TODO(kmillikin): Need to be able to get the last block of the loop
1299  // in the loop information. Add a live range stretching from the first
1300  // loop instruction to the last for each value live on entry to the
1301  // header.
1302  HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1303  BitVector::Iterator iterator(live);
1304  LifetimePosition start = LifetimePosition::FromInstructionIndex(
1305  block->first_instruction_index());
1306  LifetimePosition end = LifetimePosition::FromInstructionIndex(
1307  back_edge->last_instruction_index()).NextInstruction();
1308  while (!iterator.Done()) {
1309  int operand_index = iterator.Current();
1310  LiveRange* range = LiveRangeFor(operand_index);
1311  range->EnsureInterval(start, end, zone_);
1312  iterator.Advance();
1313  }
1314 
1315  for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1316  live_in_sets_[i]->Union(*live);
1317  }
1318  }
1319 
1320 #ifdef DEBUG
1321  if (block_id == 0) {
1322  BitVector::Iterator iterator(live);
1323  bool found = false;
1324  while (!iterator.Done()) {
1325  found = true;
1326  int operand_index = iterator.Current();
1327  PrintF("Function: %s\n",
1328  *chunk_->info()->function()->debug_name()->ToCString());
1329  PrintF("Value %d used before first definition!\n", operand_index);
1330  LiveRange* range = LiveRangeFor(operand_index);
1331  PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1332  iterator.Advance();
1333  }
1334  ASSERT(!found);
1335  }
1336 #endif
1337  }
1338 }
1339 
1340 
1341 bool LAllocator::SafePointsAreInOrder() const {
1342  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1343  int safe_point = 0;
1344  for (int i = 0; i < pointer_maps->length(); ++i) {
1345  LPointerMap* map = pointer_maps->at(i);
1346  if (safe_point > map->lithium_position()) return false;
1347  safe_point = map->lithium_position();
1348  }
1349  return true;
1350 }
1351 
1352 
1353 void LAllocator::PopulatePointerMaps() {
1354  HPhase phase("L_Populate pointer maps", this);
1355  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1356 
1357  ASSERT(SafePointsAreInOrder());
1358 
1359  // Iterate over all safe point positions and record a pointer
1360  // for all spilled live ranges at this point.
1361  int first_safe_point_index = 0;
1362  int last_range_start = 0;
1363  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1364  LiveRange* range = live_ranges()->at(range_idx);
1365  if (range == NULL) continue;
1366  // Iterate over the first parts of multi-part live ranges.
1367  if (range->parent() != NULL) continue;
1368  // Skip non-pointer values.
1369  if (!HasTaggedValue(range->id())) continue;
1370  // Skip empty live ranges.
1371  if (range->IsEmpty()) continue;
1372 
1373  // Find the extent of the range and its children.
1374  int start = range->Start().InstructionIndex();
1375  int end = 0;
1376  for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1377  LifetimePosition this_end = cur->End();
1378  if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1379  ASSERT(cur->Start().InstructionIndex() >= start);
1380  }
1381 
1382  // Most of the ranges are in order, but not all. Keep an eye on when
1383  // they step backwards and reset the first_safe_point_index so we don't
1384  // miss any safe points.
1385  if (start < last_range_start) {
1386  first_safe_point_index = 0;
1387  }
1388  last_range_start = start;
1389 
1390  // Step across all the safe points that are before the start of this range,
1391  // recording how far we step in order to save doing this for the next range.
1392  while (first_safe_point_index < pointer_maps->length()) {
1393  LPointerMap* map = pointer_maps->at(first_safe_point_index);
1394  int safe_point = map->lithium_position();
1395  if (safe_point >= start) break;
1396  first_safe_point_index++;
1397  }
1398 
1399  // Step through the safe points to see whether they are in the range.
1400  for (int safe_point_index = first_safe_point_index;
1401  safe_point_index < pointer_maps->length();
1402  ++safe_point_index) {
1403  LPointerMap* map = pointer_maps->at(safe_point_index);
1404  int safe_point = map->lithium_position();
1405 
1406  // The safe points are sorted so we can stop searching here.
1407  if (safe_point - 1 > end) break;
1408 
1409  // Advance to the next active range that covers the current
1410  // safe point position.
1411  LifetimePosition safe_point_pos =
1412  LifetimePosition::FromInstructionIndex(safe_point);
1413  LiveRange* cur = range;
1414  while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
1415  cur = cur->next();
1416  }
1417  if (cur == NULL) continue;
1418 
1419  // Check if the live range is spilled and the safe point is after
1420  // the spill position.
1421  if (range->HasAllocatedSpillOperand() &&
1422  safe_point >= range->spill_start_index()) {
1423  TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1424  range->id(), range->spill_start_index(), safe_point);
1425  map->RecordPointer(range->GetSpillOperand(), zone());
1426  }
1427 
1428  if (!cur->IsSpilled()) {
1429  TraceAlloc("Pointer in register for range %d (start at %d) "
1430  "at safe point %d\n",
1431  cur->id(), cur->Start().Value(), safe_point);
1432  LOperand* operand = cur->CreateAssignedOperand(zone_);
1433  ASSERT(!operand->IsStackSlot());
1434  map->RecordPointer(operand, zone());
1435  }
1436  }
1437  }
1438 }
1439 
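// Net effect: for every safe point covered by a tagged range, the value's
// current home is recorded: the spill slot once the safe point is at or
// past spill_start_index(), plus the assigned register while the covering
// child is not spilled. Both may be recorded for the same safe point.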
1440 
1441 void LAllocator::ProcessOsrEntry() {
1442  const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1443 
1444  // Linear search for the OSR entry instruction in the chunk.
1445  int index = -1;
1446  while (++index < instrs->length() &&
1447  !instrs->at(index)->IsOsrEntry()) {
1448  }
1449  ASSERT(index < instrs->length());
1450  LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1451 
1452  LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1453  for (int i = 0; i < live_ranges()->length(); ++i) {
1454  LiveRange* range = live_ranges()->at(i);
1455  if (range != NULL) {
1456  if (range->Covers(position) &&
1457  range->HasRegisterAssigned() &&
1458  range->TopLevel()->HasAllocatedSpillOperand()) {
1459  int reg_index = range->assigned_register();
1460  LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1461  if (range->IsDouble()) {
1462  instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1463  } else {
1464  instruction->MarkSpilledRegister(reg_index, spill_operand);
1465  }
1466  }
1467  }
1468  }
1469 }
1470 
1471 
1472 void LAllocator::AllocateGeneralRegisters() {
1473  HPhase phase("L_Allocate general registers", this);
1474  num_registers_ = Register::kNumAllocatableRegisters;
1475  AllocateRegisters();
1476 }
1477 
1478 
1479 void LAllocator::AllocateDoubleRegisters() {
1480  HPhase phase("L_Allocate double registers", this);
1481  num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1482  mode_ = DOUBLE_REGISTERS;
1483  AllocateRegisters();
1484 }
1485 
1486 
1487 void LAllocator::AllocateRegisters() {
1488  ASSERT(unhandled_live_ranges_.is_empty());
1489 
1490  for (int i = 0; i < live_ranges_.length(); ++i) {
1491  if (live_ranges_[i] != NULL) {
1492  if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
1493  AddToUnhandledUnsorted(live_ranges_[i]);
1494  }
1495  }
1496  }
1497  SortUnhandled();
1498  ASSERT(UnhandledIsSorted());
1499 
1500  ASSERT(reusable_slots_.is_empty());
1501  ASSERT(active_live_ranges_.is_empty());
1502  ASSERT(inactive_live_ranges_.is_empty());
1503 
1504  if (mode_ == DOUBLE_REGISTERS) {
1505  for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1506  LiveRange* current = fixed_double_live_ranges_.at(i);
1507  if (current != NULL) {
1508  AddToInactive(current);
1509  }
1510  }
1511  } else {
1512  for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1513  LiveRange* current = fixed_live_ranges_.at(i);
1514  if (current != NULL) {
1515  AddToInactive(current);
1516  }
1517  }
1518  }
1519 
1520  while (!unhandled_live_ranges_.is_empty()) {
1521  ASSERT(UnhandledIsSorted());
1522  LiveRange* current = unhandled_live_ranges_.RemoveLast();
1523  ASSERT(UnhandledIsSorted());
1524  LifetimePosition position = current->Start();
1525  TraceAlloc("Processing interval %d start=%d\n",
1526  current->id(),
1527  position.Value());
1528 
1529  if (current->HasAllocatedSpillOperand()) {
1530  TraceAlloc("Live range %d already has a spill operand\n", current->id());
1531  LifetimePosition next_pos = position;
1532  if (IsGapAt(next_pos.InstructionIndex())) {
1533  next_pos = next_pos.NextInstruction();
1534  }
1535  UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1536  // If the range already has a spill operand and it doesn't need a
1537  // register immediately, split it and spill the first part of the range.
1538  if (pos == NULL) {
1539  Spill(current);
1540  continue;
1541  } else if (pos->pos().Value() >
1542  current->Start().NextInstruction().Value()) {
1543  // Do not spill the live range eagerly if a use position that can benefit
1544  // from the register is too close to the start of the live range.
1545  SpillBetween(current, current->Start(), pos->pos());
1546  if (!AllocationOk()) return;
1547  ASSERT(UnhandledIsSorted());
1548  continue;
1549  }
1550  }
1551 
1552  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1553  LiveRange* cur_active = active_live_ranges_.at(i);
1554  if (cur_active->End().Value() <= position.Value()) {
1555  ActiveToHandled(cur_active);
1556  --i; // The live range was removed from the list of active live ranges.
1557  } else if (!cur_active->Covers(position)) {
1558  ActiveToInactive(cur_active);
1559  --i; // The live range was removed from the list of active live ranges.
1560  }
1561  }
1562 
1563  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1564  LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1565  if (cur_inactive->End().Value() <= position.Value()) {
1566  InactiveToHandled(cur_inactive);
1567  --i; // Live range was removed from the list of inactive live ranges.
1568  } else if (cur_inactive->Covers(position)) {
1569  InactiveToActive(cur_inactive);
1570  --i; // Live range was removed from the list of inactive live ranges.
1571  }
1572  }
1573 
1574  ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
1575 
1576  bool result = TryAllocateFreeReg(current);
1577  if (!AllocationOk()) return;
1578 
1579  if (!result) AllocateBlockedReg(current);
1580  if (!AllocationOk()) return;
1581 
1582  if (current->HasRegisterAssigned()) {
1583  AddToActive(current);
1584  }
1585  }
1586 
1587  reusable_slots_.Rewind(0);
1588  active_live_ranges_.Rewind(0);
1589  inactive_live_ranges_.Rewind(0);
1590 }
1591 
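// This is the classic linear-scan loop, extended with lifetime holes
// (the active/inactive distinction) and interval splitting. Compressed
// sketch of one iteration:
//
//   current = unhandled.RemoveLast();            // smallest Start() is last
//   active:   ended -> handled, not covering start -> inactive
//   inactive: ended -> handled, covering start    -> active
//   if (!TryAllocateFreeReg(current)) AllocateBlockedReg(current);
//   if (current->HasRegisterAssigned()) active.Add(current);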
1592 
1593 const char* LAllocator::RegisterName(int allocation_index) {
1594  if (mode_ == GENERAL_REGISTERS) {
1595  return Register::AllocationIndexToString(allocation_index);
1596  } else {
1597  return DoubleRegister::AllocationIndexToString(allocation_index);
1598  }
1599 }
1600 
1601 
1602 void LAllocator::TraceAlloc(const char* msg, ...) {
1603  if (FLAG_trace_alloc) {
1604  va_list arguments;
1605  va_start(arguments, msg);
1606  OS::VPrint(msg, arguments);
1607  va_end(arguments);
1608  }
1609 }
1610 
1611 
1612 bool LAllocator::HasTaggedValue(int virtual_register) const {
1613  HValue* value = graph_->LookupValue(virtual_register);
1614  if (value == NULL) return false;
1615  return value->representation().IsTagged();
1616 }
1617 
1618 
1619 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1620  if (virtual_register < first_artificial_register_) {
1621  HValue* value = graph_->LookupValue(virtual_register);
1622  if (value != NULL && value->representation().IsDouble()) {
1623  return DOUBLE_REGISTERS;
1624  }
1625  } else if (double_artificial_registers_.Contains(
1626  virtual_register - first_artificial_register_)) {
1627  return DOUBLE_REGISTERS;
1628  }
1629 
1630  return GENERAL_REGISTERS;
1631 }
1632 
1633 
1634 void LAllocator::AddToActive(LiveRange* range) {
1635  TraceAlloc("Add live range %d to active\n", range->id());
1636  active_live_ranges_.Add(range, zone());
1637 }
1638 
1639 
1640 void LAllocator::AddToInactive(LiveRange* range) {
1641  TraceAlloc("Add live range %d to inactive\n", range->id());
1642  inactive_live_ranges_.Add(range, zone());
1643 }
1644 
1645 
1646 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1647  if (range == NULL || range->IsEmpty()) return;
1648  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1649  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1650  LiveRange* cur_range = unhandled_live_ranges_.at(i);
1651  if (range->ShouldBeAllocatedBefore(cur_range)) {
1652  TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1653  unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1654  ASSERT(UnhandledIsSorted());
1655  return;
1656  }
1657  }
1658  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1659  unhandled_live_ranges_.InsertAt(0, range, zone());
1660  ASSERT(UnhandledIsSorted());
1661 }
1662 
1663 
1664 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1665  if (range == NULL || range->IsEmpty()) return;
1666  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1667  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1668  unhandled_live_ranges_.Add(range, zone());
1669 }
1670 
1671 
1672 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1673  ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1674  !(*b)->ShouldBeAllocatedBefore(*a));
1675  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1676  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1677  return (*a)->id() - (*b)->id();
1678 }
1679 
1680 
1681 // Sort the unhandled live ranges so that the ranges to be processed first are
1682 // at the end of the array list. This is convenient for the register allocation
1683 // algorithm because it is efficient to remove elements from the end.
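// For example, a range starting at instruction 4 compares greater than one
// starting at instruction 12 (see UnhandledSortHelper above), so it lands
// nearer the end of the array and is removed, and hence allocated, first.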
1684 void LAllocator::SortUnhandled() {
1685  TraceAlloc("Sort unhandled\n");
1686  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1687 }
1688 
1689 
1690 bool LAllocator::UnhandledIsSorted() {
1691  int len = unhandled_live_ranges_.length();
1692  for (int i = 1; i < len; i++) {
1693  LiveRange* a = unhandled_live_ranges_.at(i - 1);
1694  LiveRange* b = unhandled_live_ranges_.at(i);
1695  if (a->Start().Value() < b->Start().Value()) return false;
1696  }
1697  return true;
1698 }
1699 
1700 
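// Spill-slot recycling: when the last piece of a spilled range is retired,
// FreeSpillSlot records it in reusable_slots_, and TryReuseSpillSlot hands
// its slot to a later range only if the recorded range ends before the new
// range starts, so the two are never live in the slot at the same time.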
1701 void LAllocator::FreeSpillSlot(LiveRange* range) {
1702  // Check that we are the last range.
1703  if (range->next() != NULL) return;
1704 
1705  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1706 
1707  int index = range->TopLevel()->GetSpillOperand()->index();
1708  if (index >= 0) {
1709  reusable_slots_.Add(range, zone());
1710  }
1711 }
1712 
1713 
1714 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1715  if (reusable_slots_.is_empty()) return NULL;
1716  if (reusable_slots_.first()->End().Value() >
1717  range->TopLevel()->Start().Value()) {
1718  return NULL;
1719  }
1720  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1721  reusable_slots_.Remove(0);
1722  return result;
1723 }
1724 
1725 
1726 void LAllocator::ActiveToHandled(LiveRange* range) {
1727  ASSERT(active_live_ranges_.Contains(range));
1728  active_live_ranges_.RemoveElement(range);
1729  TraceAlloc("Moving live range %d from active to handled\n", range->id());
1730  FreeSpillSlot(range);
1731 }
1732 
1733 
1734 void LAllocator::ActiveToInactive(LiveRange* range) {
1735  ASSERT(active_live_ranges_.Contains(range));
1736  active_live_ranges_.RemoveElement(range);
1737  inactive_live_ranges_.Add(range, zone());
1738  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1739 }
1740 
1741 
1742 void LAllocator::InactiveToHandled(LiveRange* range) {
1743  ASSERT(inactive_live_ranges_.Contains(range));
1744  inactive_live_ranges_.RemoveElement(range);
1745  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1746  FreeSpillSlot(range);
1747 }
1748 
1749 
1750 void LAllocator::InactiveToActive(LiveRange* range) {
1751  ASSERT(inactive_live_ranges_.Contains(range));
1752  inactive_live_ranges_.RemoveElement(range);
1753  active_live_ranges_.Add(range, zone());
1754  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1755 }
1756 
1757 
1758 // TryAllocateFreeReg and AllocateBlockedReg assume this
1759 // when allocating local arrays.
1760 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
1761  Register::kNumAllocatableRegisters);
1762 
1763 
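// Strategy: compute, for each register, the position up to which it stays
// free for the current range. A hinted register is taken if it is free for
// the whole lifetime; otherwise the register that stays free longest wins.
// If even that register is free only for a prefix of the range, the range
// is split at that position and the tail is re-queued as unhandled.
//
//   e.g. with free_until_pos = { r0: 10, r1: 40 } and current = [0, 30[:
//   r1 is chosen; since 40 >= 30 the whole range gets r1 without a split.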
1764 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1765  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
1766 
1767  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1768  free_until_pos[i] = LifetimePosition::MaxPosition();
1769  }
1770 
1771  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1772  LiveRange* cur_active = active_live_ranges_.at(i);
1773  free_until_pos[cur_active->assigned_register()] =
1774      LifetimePosition::FromInstructionIndex(0);
1775  }
1776 
1777  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1778  LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1779  ASSERT(cur_inactive->End().Value() > current->Start().Value());
1780  LifetimePosition next_intersection =
1781  cur_inactive->FirstIntersection(current);
1782  if (!next_intersection.IsValid()) continue;
1783  int cur_reg = cur_inactive->assigned_register();
1784  free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1785  }
1786 
1787  UsePosition* hinted_use = current->FirstPosWithHint();
1788  if (hinted_use != NULL) {
1789  LOperand* hint = hinted_use->hint();
1790  if (hint->IsRegister() || hint->IsDoubleRegister()) {
1791  int register_index = hint->index();
1792  TraceAlloc(
1793  "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1794  RegisterName(register_index),
1795  free_until_pos[register_index].Value(),
1796  current->id(),
1797  current->End().Value());
1798 
1799  // The desired register is free until the end of the current live range.
1800  if (free_until_pos[register_index].Value() >= current->End().Value()) {
1801  TraceAlloc("Assigning preferred reg %s to live range %d\n",
1802  RegisterName(register_index),
1803  current->id());
1804  current->set_assigned_register(register_index, mode_, zone_);
1805  return true;
1806  }
1807  }
1808  }
1809 
1810  // Find the register which stays free for the longest time.
1811  int reg = 0;
1812  for (int i = 1; i < RegisterCount(); ++i) {
1813  if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1814  reg = i;
1815  }
1816  }
1817 
1818  LifetimePosition pos = free_until_pos[reg];
1819 
1820  if (pos.Value() <= current->Start().Value()) {
1821  // All registers are blocked.
1822  return false;
1823  }
1824 
1825  if (pos.Value() < current->End().Value()) {
1826  // Register reg is available at the range start but becomes blocked before
1827  // the range end. Split current at the position where it becomes blocked.
1828  LiveRange* tail = SplitRangeAt(current, pos);
1829  if (!AllocationOk()) return false;
1830  AddToUnhandledSorted(tail);
1831  }
1832 
1833 
1834  // Register reg is available at the range start and is free until
1835  // the range end.
1836  ASSERT(pos.Value() >= current->End().Value());
1837  TraceAlloc("Assigning free reg %s to live range %d\n",
1838  RegisterName(reg),
1839  current->id());
1840  current->set_assigned_register(reg, mode_, zone_);
1841 
1842  return true;
1843 }
1844 
1845 
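// Called when no register is free at the current range's start. For each
// register, use_pos is the next position at which one of its holders
// actually benefits from being in the register (i.e. how cheaply it could
// be freed by spilling), while block_pos is where a fixed range makes it
// impossible to free at all. The register with the furthest use_pos is
// taken, evicting its holders; but if even that position precedes the first
// use of current that requires a register, current itself is spilled up to
// that use instead.
//
//   e.g. use_pos = { r0: 12, r1: 25 }, first register use of current at 18:
//   r1 is picked (25 >= 18) and its holders are spilled; had the furthest
//   use_pos been 12 (< 18), current would be spilled up to 18 instead.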
1846 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1847  UsePosition* register_use = current->NextRegisterPosition(current->Start());
1848  if (register_use == NULL) {
1849  // There is no use in the current live range that requires a register.
1850  // We can just spill it.
1851  Spill(current);
1852  return;
1853  }
1854 
1855 
1856  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
1857  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
1858 
1859  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1860  use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1861  }
1862 
1863  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1864  LiveRange* range = active_live_ranges_[i];
1865  int cur_reg = range->assigned_register();
1866  if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1867  block_pos[cur_reg] = use_pos[cur_reg] =
1868      LifetimePosition::FromInstructionIndex(0);
1869  } else {
1870  UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1871  current->Start());
1872  if (next_use == NULL) {
1873  use_pos[cur_reg] = range->End();
1874  } else {
1875  use_pos[cur_reg] = next_use->pos();
1876  }
1877  }
1878  }
1879 
1880  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1881  LiveRange* range = inactive_live_ranges_.at(i);
1882  ASSERT(range->End().Value() > current->Start().Value());
1883  LifetimePosition next_intersection = range->FirstIntersection(current);
1884  if (!next_intersection.IsValid()) continue;
1885  int cur_reg = range->assigned_register();
1886  if (range->IsFixed()) {
1887  block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1888  use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1889  } else {
1890  use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1891  }
1892  }
1893 
1894  int reg = 0;
1895  for (int i = 1; i < RegisterCount(); ++i) {
1896  if (use_pos[i].Value() > use_pos[reg].Value()) {
1897  reg = i;
1898  }
1899  }
1900 
1901  LifetimePosition pos = use_pos[reg];
1902 
1903  if (pos.Value() < register_use->pos().Value()) {
1904  // All registers are blocked before the first use that requires a register.
1905  // Spill the starting part of the live range up to that use.
1906  //
1907  // Corner case: the first use position is equal to the start of the range.
1908  // In this case we have nothing to spill and SpillBetween will just return
1909  // this range to the list of unhandled ones, which would lead to an
1910  // infinite loop.
1911  ASSERT(current->Start().Value() < register_use->pos().Value());
1912  SpillBetween(current, current->Start(), register_use->pos());
1913  return;
1914  }
1915 
1916  if (block_pos[reg].Value() < current->End().Value()) {
1917  // Register becomes blocked before the current range end. Split before that
1918  // position.
1919  LiveRange* tail = SplitBetween(current,
1920  current->Start(),
1921  block_pos[reg].InstructionStart());
1922  AddToUnhandledSorted(tail);
1923  }
1924 
1925  // Register reg is not blocked for the whole range.
1926  ASSERT(block_pos[reg].Value() >= current->End().Value());
1927  TraceAlloc("Assigning blocked reg %s to live range %d\n",
1928  RegisterName(reg),
1929  current->id());
1930  current->set_assigned_register(reg, mode_, zone_);
1931 
1932  // This register was not free. Thus we need to find and spill
1933  // parts of active and inactive live ranges that use the same register
1934  // at the same lifetime positions as current.
1935  SplitAndSpillIntersecting(current);
1936 }
1937 
1938 
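// After a blocked register has been taken for "current", every active or
// inactive range still assigned to that register must give it up: each such
// range is spilled from current's start up to its next use that needs a
// register (or spilled entirely if no such use remains).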
1939 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1940  ASSERT(current->HasRegisterAssigned());
1941  int reg = current->assigned_register();
1942  LifetimePosition split_pos = current->Start();
1943  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1944  LiveRange* range = active_live_ranges_[i];
1945  if (range->assigned_register() == reg) {
1946  UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1947  if (next_pos == NULL) {
1948  SpillAfter(range, split_pos);
1949  } else {
1950  SpillBetween(range, split_pos, next_pos->pos());
1951  }
1952  ActiveToHandled(range);
1953  --i;
1954  }
1955  }
1956 
1957  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1958  LiveRange* range = inactive_live_ranges_[i];
1959  ASSERT(range->End().Value() > current->Start().Value());
1960  if (range->assigned_register() == reg && !range->IsFixed()) {
1961  LifetimePosition next_intersection = range->FirstIntersection(current);
1962  if (next_intersection.IsValid()) {
1963  UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1964  if (next_pos == NULL) {
1965  SpillAfter(range, split_pos);
1966  } else {
1967  next_intersection = Min(next_intersection, next_pos->pos());
1968  SpillBetween(range, split_pos, next_intersection);
1969  }
1970  InactiveToHandled(range);
1971  --i;
1972  }
1973  }
1974  }
1975 }
1976 
1977 
1978 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
1979  return pos.IsInstructionStart() &&
1980  InstructionAt(pos.InstructionIndex())->IsLabel();
1981 }
1982 
1983 
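// Splits the range in two at "pos"; the tail becomes a new live range with
// a freshly allocated virtual register. Splitting at or before the range's
// start is a no-op that returns the range itself.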
1984 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
1985  ASSERT(!range->IsFixed());
1986  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
1987 
1988  if (pos.Value() <= range->Start().Value()) return range;
1989 
1990  // We can't properly connect live ranges if the split occurred at the end
1991  // of a control instruction.
1992  ASSERT(pos.IsInstructionStart() ||
1993  !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
1994 
1995  LiveRange* result = LiveRangeFor(GetVirtualRegister());
1996  if (!AllocationOk()) return NULL;
1997  range->SplitAt(pos, result, zone_);
1998  return result;
1999 }
2000 
2001 
2002 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2003  LifetimePosition start,
2004  LifetimePosition end) {
2005  ASSERT(!range->IsFixed());
2006  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2007  range->id(),
2008  start.Value(),
2009  end.Value());
2010 
2011  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2012  ASSERT(split_pos.Value() >= start.Value());
2013  return SplitRangeAt(range, split_pos);
2014 }
2015 
2016 
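// Heuristic: avoid splitting inside loops. When start and end lie in
// different blocks, the split position is hoisted to the first instruction
// of the outermost loop header found between them; otherwise the latest
// position is chosen so that as much of the range as possible keeps its
// register.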
2017 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2018  LifetimePosition end) {
2019  int start_instr = start.InstructionIndex();
2020  int end_instr = end.InstructionIndex();
2021  ASSERT(start_instr <= end_instr);
2022 
2023  // We have no choice: start and end fall in the same instruction.
2024  if (start_instr == end_instr) return end;
2025 
2026  HBasicBlock* start_block = GetBlock(start);
2027  HBasicBlock* end_block = GetBlock(end);
2028 
2029  if (end_block == start_block) {
2030  // The interval is split in the same basic block. Split at the latest
2031  // possible position.
2032  return end;
2033  }
2034 
2035  HBasicBlock* block = end_block;
2036  // Find header of outermost loop.
2037  while (block->parent_loop_header() != NULL &&
2038  block->parent_loop_header()->block_id() > start_block->block_id()) {
2039  block = block->parent_loop_header();
2040  }
2041 
2042  // We did not find any suitable outer loop. Split at the latest possible
2043  // position unless end_block is a loop header itself.
2044  if (block == end_block && !end_block->IsLoopHeader()) return end;
2045 
2045 
2046  return LifetimePosition::FromInstructionIndex(
2047      block->first_instruction_index());
2048 }
2049 
2050 
2051 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2052  LiveRange* second_part = SplitRangeAt(range, pos);
2053  if (!AllocationOk()) return;
2054  Spill(second_part);
2055 }
2056 
2057 
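// Carves [start, end[ out of the range: the part before "start" keeps its
// current allocation, the middle part is spilled, and the part from "end"
// onwards is re-queued as unhandled so it can later get a (possibly
// different) register.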
2058 void LAllocator::SpillBetween(LiveRange* range,
2059  LifetimePosition start,
2060  LifetimePosition end) {
2061  ASSERT(start.Value() < end.Value());
2062  LiveRange* second_part = SplitRangeAt(range, start);
2063  if (!AllocationOk()) return;
2064 
2065  if (second_part->Start().Value() < end.Value()) {
2066  // The split result intersects with [start, end[.
2067  // Split it at a position between ]start+1, end[, spill the middle part
2068  // and add the rest to the unhandled list.
2069  LiveRange* third_part = SplitBetween(
2070  second_part,
2071  second_part->Start().InstructionEnd(),
2072  end.PrevInstruction().InstructionEnd());
2073 
2074  ASSERT(third_part != second_part);
2075 
2076  Spill(second_part);
2077  AddToUnhandledSorted(third_part);
2078  } else {
2079  // The split result does not intersect with [start, end[.
2080  // Nothing to spill. Just add it to the unhandled list as a whole.
2081  AddToUnhandledSorted(second_part);
2082  }
2083 }
2084 
2085 
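// The spill operand is stored on the top-level range, so all children
// produced by splitting share a single stack slot; a recycled slot is
// preferred over allocating a fresh one from the chunk.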
2086 void LAllocator::Spill(LiveRange* range) {
2087  ASSERT(!range->IsSpilled());
2088  TraceAlloc("Spilling live range %d\n", range->id());
2089  LiveRange* first = range->TopLevel();
2090 
2091  if (!first->HasAllocatedSpillOperand()) {
2092  LOperand* op = TryReuseSpillSlot(range);
2093  if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
2094  first->SetSpillOperand(op);
2095  }
2096  range->MakeSpilled(zone_);
2097 }
2098 
2099 
2100 int LAllocator::RegisterCount() const {
2101  return num_registers_;
2102 }
2103 
2104 
2105 #ifdef DEBUG
2106 
2107 
2108 void LAllocator::Verify() const {
2109  for (int i = 0; i < live_ranges()->length(); ++i) {
2110  LiveRange* current = live_ranges()->at(i);
2111  if (current != NULL) current->Verify();
2112  }
2113 }
2114 
2115 
2116 #endif
2117 
2118 
2119 } } // namespace v8::internal