#if defined(V8_TARGET_ARCH_IA32)

LGapResolver::LGapResolver(LCodeGen* owner)
    : cgen_(owner),
      moves_(32, owner->zone()),
      source_uses_(),
      destination_uses_(),
      spilled_register_(-1) {}

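// Resolve a parallel move: perform every move in the worklist.  Moves
// with constant sources are deferred to a second pass, since they cannot
// block other moves and performing them last keeps their destination
// registers free during the first pass.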
void LGapResolver::Resolve(LParallelMove* parallel_move) {
  BuildInitialMoveList(parallel_move);

  // First pass: perform all moves whose source is not a constant.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands move = moves_[i];
    if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
      PerformMove(i);
    }
  }

  // Second pass: the remaining moves all have constant sources.
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated()) {
      ASSERT(moves_[i].source()->IsConstantOperand());
      EmitMove(i);
    }
  }

  Finish();
}

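// Linear sweep over the parallel move's operands, adding each
// non-redundant move to the worklist.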
void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
  const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) AddMove(move);
  }
  Verify();
}

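// Perform the move at the given index, first recursively performing any
// moves that block it (moves whose source is this move's destination).
// Cycles in the move graph are detected via the "pending" state and
// broken with a swap.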
void LGapResolver::PerformMove(int index) {
  ASSERT(!moves_[index].IsPending());
  ASSERT(!moves_[index].IsRedundant());

  // Clear this move's destination to mark it as pending; the real
  // destination is saved on the side.
  LOperand* destination = moves_[index].destination();
  moves_[index].set_destination(NULL);

  // Recursively perform any unperformed, unpending move whose source
  // equals this move's destination.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination) && !other_move.IsPending()) {
      PerformMove(i);
    }
  }

  // This move is about to be resolved, so restore its destination.
  moves_[index].set_destination(destination);

  // Cycle-breaking swaps may have changed this move's source; if it now
  // equals the destination, the move is redundant.
  if (moves_[index].source()->Equals(destination)) {
    RemoveMove(index);
    return;
  }

  // If this move is still blocked, the blocking move must be pending:
  // we have found a cycle, which we break with a swap.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination)) {
      ASSERT(other_move.IsPending());
      EmitSwap(index);
      return;
    }
  }

  // This move is not blocked.
  EmitMove(index);
}

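// Add a move to the worklist, keeping the per-register source and
// destination use counts up to date.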
void LGapResolver::AddMove(LMoveOperands move) {
  LOperand* source = move.source();
  if (source->IsRegister()) ++source_uses_[source->index()];

  LOperand* destination = move.destination();
  if (destination->IsRegister()) ++destination_uses_[destination->index()];

  moves_.Add(move, cgen_->zone());
}

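// Eliminate the move at the given index, decrementing the use counts of
// any registers it referenced.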
void LGapResolver::RemoveMove(int index) {
  LOperand* source = moves_[index].source();
  if (source->IsRegister()) {
    --source_uses_[source->index()];
    ASSERT(source_uses_[source->index()] >= 0);
  }

  LOperand* destination = moves_[index].destination();
  if (destination->IsRegister()) {
    --destination_uses_[destination->index()];
    ASSERT(destination_uses_[destination->index()] >= 0);
  }

  moves_[index].Eliminate();
}

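// Count the non-eliminated moves in the worklist that use the given
// operand as a source.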
int LGapResolver::CountSourceUses(LOperand* operand) {
  int count = 0;
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated() && moves_[i].source()->Equals(operand)) {
      ++count;
    }
  }
  return count;
}

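// Find a register that is the destination of a remaining move but not
// the source of any, other than the given register; returns no_reg if
// there is none.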
Register LGapResolver::GetFreeRegisterNot(Register reg) {
  int skip_index = reg.is(no_reg) ? -1 : Register::ToAllocationIndex(reg);
  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
    if (source_uses_[i] == 0 && destination_uses_[i] > 0 && i != skip_index) {
      return Register::FromAllocationIndex(i);
    }
  }
  return no_reg;
}

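// Check that the resolver is in its clean initial state: no pending
// moves, no spilled register, and all use counts at zero.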
bool LGapResolver::HasBeenReset() {
  if (!moves_.is_empty()) return false;
  if (spilled_register_ >= 0) return false;

  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
    if (source_uses_[i] != 0) return false;
    if (destination_uses_[i] != 0) return false;
  }
  return true;
}

void LGapResolver::Verify() {
#ifdef ENABLE_SLOW_ASSERTS
  for (int i = 0; i < moves_.length(); ++i) {
    LOperand* destination = moves_[i].destination();
    for (int j = i + 1; j < moves_.length(); ++j) {
      SLOW_ASSERT(!destination->Equals(moves_[j].destination()));
    }
  }
#endif
}

#define __ ACCESS_MASM(cgen_->masm())

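// Restore any register that was spilled to create a temp, then clear
// the worklist.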
void LGapResolver::Finish() {
  if (spilled_register_ >= 0) {
    __ pop(Register::FromAllocationIndex(spilled_register_));
    spilled_register_ = -1;
  }
  moves_.Rewind(0);
}

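// If the operand is a register whose value is currently spilled on the
// stack, restore it before it is used.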
void LGapResolver::EnsureRestored(LOperand* operand) {
  if (operand->IsRegister() && operand->index() == spilled_register_) {
    __ pop(Register::FromAllocationIndex(spilled_register_));
    spilled_register_ = -1;
  }
}

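// Obtain a general-purpose temp register, spilling one to the stack
// only if no register is free.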
Register LGapResolver::EnsureTempRegister() {
  // 1. We may have already spilled to create a temp register.
  if (spilled_register_ >= 0) {
    return Register::FromAllocationIndex(spilled_register_);
  }

  // 2. We may have a free register that we can use without spilling.
  Register free = GetFreeRegisterNot(no_reg);
  if (!free.is(no_reg)) return free;

  // 3. Prefer to spill a register that is not used in any remaining move,
  // because it will not need to be restored until the end.
  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
    if (source_uses_[i] == 0 && destination_uses_[i] == 0) {
      Register scratch = Register::FromAllocationIndex(i);
      __ push(scratch);
      spilled_register_ = i;
      return scratch;
    }
  }

  // 4. Otherwise spill an arbitrary register; register 0 is as good as any.
  Register scratch = Register::FromAllocationIndex(0);
  __ push(scratch);
  spilled_register_ = 0;
  return scratch;
}

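// Emit code for the move at the given index, dispatching on the kinds of
// its source and destination operands, then remove it from the worklist.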
void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();
  EnsureRestored(source);
  EnsureRestored(destination);

  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.
  if (source->IsRegister()) {
    ASSERT(destination->IsRegister() || destination->IsStackSlot());
    Register src = cgen_->ToRegister(source);
    Operand dst = cgen_->ToOperand(destination);
    __ mov(dst, src);

  } else if (source->IsStackSlot()) {
    ASSERT(destination->IsRegister() || destination->IsStackSlot());
    Operand src = cgen_->ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      __ mov(dst, src);
    } else {
      // Memory-to-memory: spill on demand to get a temp register.
      Register tmp = EnsureTempRegister();
      Operand dst = cgen_->ToOperand(destination);
      __ mov(tmp, src);
      __ mov(dst, tmp);
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      if (cgen_->IsInteger32(constant_source)) {
        __ Set(dst, cgen_->ToInteger32Immediate(constant_source));
      } else {
        __ LoadObject(dst, cgen_->ToHandle(constant_source));
      }
    } else {
      ASSERT(destination->IsStackSlot());
      Operand dst = cgen_->ToOperand(destination);
      if (cgen_->IsInteger32(constant_source)) {
        __ Set(dst, cgen_->ToInteger32Immediate(constant_source));
      } else {
        Register tmp = EnsureTempRegister();
        __ LoadObject(tmp, cgen_->ToHandle(constant_source));
        __ mov(dst, tmp);
      }
    }

  } else if (source->IsDoubleRegister()) {
    XMMRegister src = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = cgen_->ToDoubleRegister(destination);
      __ movaps(dst, src);
    } else {
      ASSERT(destination->IsDoubleStackSlot());
      Operand dst = cgen_->ToOperand(destination);
      __ movdbl(dst, src);
    }

  } else if (source->IsDoubleStackSlot()) {
    ASSERT(destination->IsDoubleRegister() ||
           destination->IsDoubleStackSlot());
    Operand src = cgen_->ToOperand(source);
    if (destination->IsDoubleRegister()) {
      XMMRegister dst = cgen_->ToDoubleRegister(destination);
      __ movdbl(dst, src);
    } else {
      // Double memory-to-memory via the fixed scratch register xmm0.
      Operand dst = cgen_->ToOperand(destination);
      __ movdbl(xmm0, src);
      __ movdbl(dst, xmm0);
    }

  } else {
    UNREACHABLE();
  }

  RemoveMove(index);
}

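// Emit code to swap the values of the move's source and destination, in
// order to break a cycle in the move graph, then rewrite the remaining
// moves that referred to either operand.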
void LGapResolver::EmitSwap(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();
  EnsureRestored(source);
  EnsureRestored(destination);

  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = cgen_->ToRegister(source);
    Register dst = cgen_->ToRegister(destination);
    __ xchg(dst, src);

  } else if ((source->IsRegister() && destination->IsStackSlot()) ||
             (source->IsStackSlot() && destination->IsRegister())) {
    // Register-memory.  Use a free register as a temp if possible.
    Register tmp = GetFreeRegisterNot(no_reg);
    Register reg =
        cgen_->ToRegister(source->IsRegister() ? source : destination);
    Operand mem =
        cgen_->ToOperand(source->IsRegister() ? destination : source);
    if (tmp.is(no_reg)) {
      // No free register: swap through the stack.
      __ push(reg);
      __ mov(reg, mem);
      __ pop(mem);
    } else {
      __ mov(tmp, mem);
      __ mov(mem, reg);
      __ mov(reg, tmp);
    }
  } else if (source->IsStackSlot() && destination->IsStackSlot()) {
    // Memory-memory.  Use a free register as a temp if possible; if a
    // second free register is available, use it for the other value.
    Register tmp0 = EnsureTempRegister();
    Register tmp1 = GetFreeRegisterNot(tmp0);
    Operand src = cgen_->ToOperand(source);
    Operand dst = cgen_->ToOperand(destination);
    if (tmp1.is(no_reg)) {
      // Only one temp register available: swap with three xors.
      __ mov(tmp0, dst);
      __ xor_(tmp0, src);
      __ xor_(src, tmp0);
      __ xor_(tmp0, src);
      __ mov(dst, tmp0);
    } else {
      __ mov(tmp0, dst);
      __ mov(tmp1, src);
      __ mov(dst, tmp1);
      __ mov(src, tmp0);
    }
  } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
    // XMM register-register swap via the fixed scratch register xmm0.
    XMMRegister src = cgen_->ToDoubleRegister(source);
    XMMRegister dst = cgen_->ToDoubleRegister(destination);
    __ movaps(xmm0, src);
    __ movaps(src, dst);
    __ movaps(dst, xmm0);

  } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) {
    // XMM register-memory swap, also via xmm0.
    ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot());
    XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister()
                                                  ? source
                                                  : destination);
    Operand other =
        cgen_->ToOperand(source->IsDoubleRegister() ? destination : source);
    __ movdbl(xmm0, other);
    __ movdbl(other, reg);
    __ movdbl(reg, Operand(xmm0));
  } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) {
    // Double-width memory-to-memory.  Spill on demand for a general
    // purpose temp register and use xmm0 as a fixed scratch register.
    Register tmp = EnsureTempRegister();
    Operand src0 = cgen_->ToOperand(source);
    Operand src1 = cgen_->HighOperand(source);
    Operand dst0 = cgen_->ToOperand(destination);
    Operand dst1 = cgen_->HighOperand(destination);
    __ movdbl(xmm0, dst0);  // Save destination in xmm0.
    __ mov(tmp, src0);      // Then use tmp to copy source to destination.
    __ mov(dst0, tmp);
    __ mov(tmp, src1);
    __ mov(dst1, tmp);
    __ movdbl(src0, xmm0);

  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }

  // The swap has also performed the move at this index, so it can be
  // removed from the worklist.
  RemoveMove(index);

  // Any unperformed (including pending) move with a source equal to this
  // move's source or destination needs its source rewritten to reflect
  // the swap.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(source)) {
      moves_[i].set_source(destination);
    } else if (other_move.Blocks(destination)) {
      moves_[i].set_source(source);
    }
  }

  // The cached source use counts must be updated as well.
  if (source->IsRegister() && destination->IsRegister()) {
    int temp = source_uses_[source->index()];
    source_uses_[source->index()] = source_uses_[destination->index()];
    source_uses_[destination->index()] = temp;
  } else if (source->IsRegister()) {
    // Use counts are only tracked for registers; recount after the swap.
    source_uses_[source->index()] = CountSourceUses(source);
  } else if (destination->IsRegister()) {
    source_uses_[destination->index()] = CountSourceUses(destination);
  }
}

#undef __

#endif  // V8_TARGET_ARCH_IA32