v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium-arm64.cc
Go to the documentation of this file.
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
#include "v8.h"

#include "lithium-allocator-inl.h"
#include "arm64/lithium-arm64.h"
// Restored: original include line 32 was dropped in extraction. It declares
// LCodeGen and its Do* visitors, which the DEFINE_COMPILE expansion needs.
#include "arm64/lithium-codegen-arm64.h"
#include "hydrogen-osr.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 
39 #define DEFINE_COMPILE(type) \
40  void L##type::CompileToNative(LCodeGen* generator) { \
41  generator->Do##type(this); \
42  }
44 #undef DEFINE_COMPILE
45 
#ifdef DEBUG
// Debug-only invariant check for instructions marked as calls: since a call
// clobbers every allocatable register, no operand may ask the allocator for
// an arbitrary register that must survive across the call.
void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  ASSERT(Output() == NULL ||
         LUnallocated::cast(Output())->HasFixedPolicy() ||
         !LUnallocated::cast(Output())->HasRegisterPolicy());
  // Inputs: fixed register, or use-at-start (dies before the clobber).
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  // Temps: fixed register or a non-register policy.
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
  }
}
#endif
66 
67 
68 void LLabel::PrintDataTo(StringStream* stream) {
69  LGap::PrintDataTo(stream);
70  LLabel* rep = replacement();
71  if (rep != NULL) {
72  stream->Add(" Dead block replaced with B%d", rep->block_id());
73  }
74 }
75 
76 
// Debug printer: "<args> length <len> index <idx>".
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);
  stream->Add(" length ");
  length()->PrintTo(stream);
  stream->Add(" index ");
  index()->PrintTo(stream);
}
84 
85 
// Debug printer: "Btrue | Bfalse on <value>".
void LBranch::PrintDataTo(StringStream* stream) {
  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
  value()->PrintTo(stream);
}
90 
91 
// Debug printer: "= <function>#<arity> / ".
void LCallJSFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  function()->PrintTo(stream);
  stream->Add("#%d / ", arity());
}
97 
98 
99 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
100  for (int i = 0; i < InputCount(); i++) {
101  InputAt(i)->PrintTo(stream);
102  stream->Add(" ");
103  }
104  stream->Add("#%d / ", arity());
105 }
106 
107 
// Debug printer: "= <constructor> #<arity> / ".
void LCallNew::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  constructor()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}
113 
114 
115 void LCallNewArray::PrintDataTo(StringStream* stream) {
116  stream->Add("= ");
117  constructor()->PrintTo(stream);
118  stream->Add(" #%d / ", arity());
119  ElementsKind kind = hydrogen()->elements_kind();
120  stream->Add(" (%s) ", ElementsKindToString(kind));
121 }
122 
123 
// Debug printer: "if class_of_test(<value>, "<name>") then B.. else B..".
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  value()->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(),
              false_block_id());
}
132 
133 
// Debug printer: "if <left> <op> <right> then B.. else B..".
void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  left()->PrintTo(stream);
  stream->Add(" %s ", Token::String(op()));
  right()->PrintTo(stream);
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
141 
142 
// Debug printer: "if has_cached_array_index(<value>) then B.. else B..".
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
148 
149 
// A goto is worth a code comment only when it does not fall through to the
// next emitted block.
bool LGoto::HasInterestingComment(LCodeGen* gen) const {
  return !gen->IsNextEmittedBlock(block_id());
}
153 
154 
// Debug printer: the jump target block id.
void LGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", block_id());
}
158 
159 
// Debug printer: " = <base> + <offset>".
void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  stream->Add(" = ");
  base_object()->PrintTo(stream);
  stream->Add(" + ");
  offset()->PrintTo(stream);
}
166 
167 
// Debug printer: "= <function> #<arity> / ".
void LInvokeFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  function()->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}
173 
174 
// Full instruction printer: mnemonic, result operand, instruction-specific
// data, then (optionally) the attached environment and pointer map.
void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());

  PrintOutputOperandTo(stream);

  PrintDataTo(stream);

  if (HasEnvironment()) {
    stream->Add(" ");
    environment()->PrintTo(stream);
  }

  if (HasPointerMap()) {
    stream->Add(" ");
    pointer_map()->PrintTo(stream);
  }
}
192 
193 
194 void LInstruction::PrintDataTo(StringStream* stream) {
195  stream->Add("= ");
196  for (int i = 0; i < InputCount(); i++) {
197  if (i > 0) stream->Add(" ");
198  if (InputAt(i) == NULL) {
199  stream->Add("NULL");
200  } else {
201  InputAt(i)->PrintTo(stream);
202  }
203  }
204 }
205 
206 
207 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
208  if (HasResult()) result()->PrintTo(stream);
209 }
210 
211 
// Debug printer: "if has_instance_type(<value>) then B.. else B..".
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
217 
218 
// Debug printer: "if is_object(<value>) then B.. else B..".
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
224 
225 
// Debug printer: "if is_string(<value>) then B.. else B..".
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
231 
232 
// Debug printer: "if is_smi(<value>) then B.. else B..".
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
238 
239 
// Debug printer: "if typeof <value> == "<literal>" then B.. else B..".
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if typeof ");
  value()->PrintTo(stream);
  stream->Add(" == \"%s\" then B%d else B%d",
              hydrogen()->type_literal()->ToCString().get(),
              true_block_id(), false_block_id());
}
247 
248 
// Debug printer: "if is_undetectable(<value>) then B.. else B..".
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  value()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
254 
255 
256 bool LGap::IsRedundant() const {
257  for (int i = 0; i < 4; i++) {
258  if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {
259  return false;
260  }
261  }
262 
263  return true;
264 }
265 
266 
267 void LGap::PrintDataTo(StringStream* stream) {
268  for (int i = 0; i < 4; i++) {
269  stream->Add("(");
270  if (parallel_moves_[i] != NULL) {
271  parallel_moves_[i]->PrintDataTo(stream);
272  }
273  stream->Add(") ");
274  }
275 }
276 
277 
// Debug printer: "<context>[<slot>]".
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d]", slot_index());
}
282 
283 
// Debug printer: " = <function>.code_entry = <code object>".
void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
  stream->Add(" = ");
  function()->PrintTo(stream);
  stream->Add(".code_entry = ");
  code_object()->PrintTo(stream);
}
290 
291 
// Debug printer: "<context>[<slot>] <- <value>".
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintTo(stream);
  stream->Add("[%d] <- ", slot_index());
  value()->PrintTo(stream);
}
297 
298 
// Debug printer: "<object>[<key>] <- <value>".
void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}
306 
307 
// Debug printer: "<object><access> <- <value>".
void LStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  hydrogen()->access().PrintTo(stream);
  stream->Add(" <- ");
  value()->PrintTo(stream);
}
314 
315 
// Debug printer: "<object>.<name> <- <value>".
void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(String::cast(*name())->ToCString().get());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}
323 
324 
// Debug printer: "if string_compare(<left><right>) then B.. else B..".
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  left()->PrintTo(stream);
  right()->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
331 
332 
// Debug printer: "<object><old map ptr> -> <new map ptr>".
void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("%p -> %p", *original_map(), *transitioned_map());
}
337 
338 
339 template<int T>
341  value()->PrintTo(stream);
342 }
343 
344 
345 const char* LArithmeticD::Mnemonic() const {
346  switch (op()) {
347  case Token::ADD: return "add-d";
348  case Token::SUB: return "sub-d";
349  case Token::MUL: return "mul-d";
350  case Token::DIV: return "div-d";
351  case Token::MOD: return "mod-d";
352  default:
353  UNREACHABLE();
354  return NULL;
355  }
356 }
357 
358 
359 const char* LArithmeticT::Mnemonic() const {
360  switch (op()) {
361  case Token::ADD: return "add-t";
362  case Token::SUB: return "sub-t";
363  case Token::MUL: return "mul-t";
364  case Token::MOD: return "mod-t";
365  case Token::DIV: return "div-t";
366  case Token::BIT_AND: return "bit-and-t";
367  case Token::BIT_OR: return "bit-or-t";
368  case Token::BIT_XOR: return "bit-xor-t";
369  case Token::ROR: return "ror-t";
370  case Token::SHL: return "shl-t";
371  case Token::SAR: return "sar-t";
372  case Token::SHR: return "shr-t";
373  default:
374  UNREACHABLE();
375  return NULL;
376  }
377 }
378 
379 
380 void LChunkBuilder::Abort(BailoutReason reason) {
381  info()->set_bailout_reason(reason);
382  status_ = ABORTED;
383 }
384 
385 
386 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
387  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
389 }
390 
391 
392 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
393  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
395 }
396 
397 
// Record a use of |value| with the given operand policy. Values that are
// emitted lazily at their uses (e.g. constants) are compiled here, at the
// point of use, before the operand is tagged with the value's vreg.
LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}
406 
407 
// Use |value| pinned to a specific general-purpose register.
LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}
411 
412 
// Use |value| pinned to a specific floating-point register.
LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
                                        DoubleRegister fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}
417 
418 
// Use |value| in any register; the register must hold the value for the
// whole instruction.
LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
422 
423 
// Use |value| in a register that the instruction is allowed to overwrite.
LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
427 
428 
429 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
430  return Use(value,
431  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
433 }
434 
435 
436 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
437  return value->IsConstant() ? UseConstant(value) : UseRegister(value);
438 }
439 
440 
441 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
442  return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);
443 }
444 
445 
// Reference a constant value directly via a constant operand in the chunk.
LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
  return chunk_->DefineConstantOperand(HConstant::cast(value));
}
449 
450 
451 LOperand* LChunkBuilder::UseAny(HValue* value) {
452  return value->IsConstant()
453  ? UseConstant(value)
454  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
455 }
456 
457 
// Attach |result| as the output operand of |instr|, tagged with the current
// hydrogen instruction's virtual register.
LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}
464 
465 
// Define the result in any register.
LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
471 
472 
// Define the result in a specific stack (spill) slot.
LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateResultInstruction<1>* instr, int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
478 
479 
// Define the result in the same location as the first input operand.
LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateResultInstruction<1>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
485 
486 
// Define the result in a specific general-purpose register.
LInstruction* LChunkBuilder::DefineFixed(
    LTemplateResultInstruction<1>* instr, Register reg) {
  return Define(instr, ToUnallocated(reg));
}
491 
492 
// Define the result in a specific floating-point register.
LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
  return Define(instr, ToUnallocated(reg));
}
497 
498 
// Mark |instr| as a call: verify its register constraints (debug only),
// give it a pointer map for GC, and attach a deopt environment when lazy
// bailout after the call could otherwise land before it.
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
  info()->MarkAsNonDeferredCalling();
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}
522 
523 
// Give |instr| a fresh (empty) pointer map; it must not already have one.
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
  return instr;
}
529 
530 
// Allocate a fresh temporary register operand with its own virtual
// register. On vreg exhaustion the build is aborted and vreg 0 is used as a
// placeholder so callers need not check.
LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
  int vreg = allocator_->GetVirtualRegister();
  if (!allocator_->AllocationOk()) {
    Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
    vreg = 0;
  }
  operand->set_virtual_register(vreg);
  return operand;
}
542 
543 
// Hand out the next spill slot index; simply bumps the counter.
int LPlatformChunk::GetNextSpillIndex() {
  return spill_slot_count_++;
}
547 
548 
549 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
550  int index = GetNextSpillIndex();
551  if (kind == DOUBLE_REGISTERS) {
552  return LDoubleStackSlot::Create(index, zone());
553  } else {
554  ASSERT(kind == GENERAL_REGISTERS);
555  return LStackSlot::Create(index, zone());
556  }
557 }
558 
559 
// Create a temp operand pinned to a specific floating-point register.
LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}
565 
566 
// Top-level driver: translate the whole hydrogen graph into a lithium
// chunk, block by block. Returns NULL if the build aborts.
LPlatformChunk* LChunkBuilder::Build() {
  ASSERT(is_unused());
  chunk_ = new(zone()) LPlatformChunk(info_, graph_);
  LPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;

  // If compiling for OSR, reserve space for the unoptimized frame,
  // which will be subsumed into this frame.
  if (graph()->has_osr()) {
    // TODO(all): GetNextSpillIndex just increments a field. It has no other
    // side effects, so we should get rid of this loop.
    for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
      chunk_->GetNextSpillIndex();
    }
  }

  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    DoBasicBlock(blocks->at(i));
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}
591 
592 
// Translate one hydrogen basic block: set up its incoming environment
// (start block, single-predecessor copy, or phi join), then lower each
// instruction in order and record the block's lithium instruction range.
void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
  ASSERT(is_building());
  current_block_ = block;

  if (block->IsStartBlock()) {
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);

    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
          (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    // Install each live phi at its merged environment index.
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      if (phi->HasMergedIndex()) {
        last_environment->SetValueAt(phi->merged_index(), phi);
      }
    }
    // Deleted phis leave undefined in their environment slots.
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      if (block->deleted_phis()->at(i) < last_environment->length()) {
        last_environment->SetValueAt(block->deleted_phis()->at(i),
                                     graph_->GetConstantUndefined());
      }
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }

  // Translate hydrogen instructions to lithium ones for the current block.
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while ((current != NULL) && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  current_block_ = NULL;
}
660 
661 
// Lower a single hydrogen instruction to lithium: replace dead values with
// dummies, dispatch to the per-instruction builder, verify allocator
// invariants (debug), and append the result plus any lazy-bailout
// bookkeeping for calls.
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;

  LInstruction* instr = NULL;
  if (current->CanReplaceWithDummyUses()) {
    // The value is unused: emit dummies so each operand is still "used"
    // once, keeping the allocator's bookkeeping consistent.
    if (current->OperandCount() == 0) {
      instr = DefineAsRegister(new(zone()) LDummy());
    } else {
      ASSERT(!current->OperandAt(0)->IsControlInstruction());
      instr = DefineAsRegister(new(zone())
          LDummyUse(UseAny(current->OperandAt(0))));
    }
    for (int i = 1; i < current->OperandCount(); ++i) {
      if (current->OperandAt(i)->IsControlInstruction()) continue;
      LInstruction* dummy =
          new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
      dummy->set_hydrogen_value(current);
      chunk_->AddInstruction(dummy, current_block_);
    }
  } else {
    instr = current->CompileToLithium(this);
  }

  argument_count_ += current->argument_delta();
  ASSERT(argument_count_ >= 0);

  if (instr != NULL) {
    // Associate the hydrogen instruction first, since we may need it for
    // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
    instr->set_hydrogen_value(current);

// NOTE(review): VerifyCall above is guarded with "#ifdef DEBUG" while this
// uses "#if DEBUG" — relies on DEBUG being defined to a value; confirm
// intended.
#if DEBUG
    // Make sure that the lithium instruction has either no fixed register
    // constraints in temps or the result OR no uses that are only used at
    // start. If this invariant doesn't hold, the register allocator can decide
    // to insert a split of a range immediately before the instruction due to an
    // already allocated register needing to be used for the instruction's fixed
    // register constraint. In this case, the register allocator won't see an
    // interference between the split child and the use-at-start (it would if
    // the it was just a plain use), so it is free to move the split child into
    // the same register that is used for the use-at-start.
    // See https://code.google.com/p/chromium/issues/detail?id=201590
    if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
      int fixed = 0;
      int used_at_start = 0;
      for (UseIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->IsUsedAtStart()) ++used_at_start;
      }
      if (instr->Output() != NULL) {
        if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
      }
      for (TempIterator it(instr); !it.Done(); it.Advance()) {
        LUnallocated* operand = LUnallocated::cast(it.Current());
        if (operand->HasFixedPolicy()) ++fixed;
      }
      ASSERT(fixed == 0 || used_at_start == 0);
    }
#endif

    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    chunk_->AddInstruction(instr, current_block_);

    if (instr->IsCall()) {
      // Calls get an explicit LLazyBailout so lazy deopt lands after the
      // call (at the simulate) rather than before it.
      HValue* hydrogen_value_for_lazy_bailout = current;
      LInstruction* instruction_needing_environment = NULL;
      if (current->HasObservableSideEffects()) {
        HSimulate* sim = HSimulate::cast(current->next());
        instruction_needing_environment = instr;
        sim->ReplayEnvironment(current_block_->last_environment());
        hydrogen_value_for_lazy_bailout = sim;
      }
      LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
      bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
      chunk_->AddInstruction(bailout, current_block_);
      if (instruction_needing_environment != NULL) {
        // Store the lazy deopt environment with the instruction if needed.
        // Right now it is only used for LInstanceOfKnownGlobal.
        instruction_needing_environment->
            SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
      }
    }
  }
  current_instruction_ = old_current;
}
753 
754 
// Attach a deoptimization environment, built from the current block's last
// hydrogen environment, to |instr|.
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator,
                                           &objects_to_materialize));
  return instr;
}
764 
765 
// The control instruction marking the end of a block that completed
// abruptly (e.g., threw an exception). There is nothing specific to do.
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  return NULL;
}
771 
772 
773 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
774  HArithmeticBinaryOperation* instr) {
775  ASSERT(instr->representation().IsDouble());
776  ASSERT(instr->left()->representation().IsDouble());
777  ASSERT(instr->right()->representation().IsDouble());
778 
779  if (op == Token::MOD) {
780  LOperand* left = UseFixedDouble(instr->left(), d0);
781  LOperand* right = UseFixedDouble(instr->right(), d1);
782  LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
783  return MarkAsCall(DefineFixedDouble(result, d0), instr);
784  } else {
785  LOperand* left = UseRegisterAtStart(instr->left());
786  LOperand* right = UseRegisterAtStart(instr->right());
787  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
788  return DefineAsRegister(result);
789  }
790 }
791 
792 
// Lower a tagged (generic) binary operation: always a call through the
// standard binary-op calling convention (left in x1, right in x0, context
// in cp, result in x0).
LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HBinaryOperation* instr) {
  ASSERT((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
         (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
         (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
         (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
         (op == Token::BIT_XOR));
  HValue* left = instr->left();
  HValue* right = instr->right();

  // TODO(jbramley): Once we've implemented smi support for all arithmetic
  // operations, these assertions should check IsTagged().
  ASSERT(instr->representation().IsSmiOrTagged());
  ASSERT(left->representation().IsSmiOrTagged());
  ASSERT(right->representation().IsSmiOrTagged());

  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left_operand = UseFixed(left, x1);
  LOperand* right_operand = UseFixed(right, x0);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, context, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
816 
817 
// This hydrogen instruction is never lowered on this platform.
LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
    HBoundsCheckBaseIndexInformation* instr) {
  UNREACHABLE();
  return NULL;
}
823 
824 
// Lower an access into the (stack-materialized) arguments area; requires a
// frame. When both length and index are constants the arguments pointer
// only needs to live to the start of the instruction.
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* args = NULL;
  LOperand* length = NULL;
  LOperand* index = NULL;

  if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
    args = UseRegisterAtStart(instr->arguments());
    length = UseConstant(instr->length());
    index = UseConstant(instr->index());
  } else {
    args = UseRegister(instr->arguments());
    length = UseRegisterAtStart(instr->length());
    index = UseRegisterOrConstantAtStart(instr->index());
  }

  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
843 
844 
// Lower an addition according to its representation: smi/int32 get LAddS /
// LAddI (with a deopt environment if overflow is possible), external
// pointers get LAddE, doubles and tagged values defer to the generic
// arithmetic helpers.
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right =
        UseRegisterOrConstantAtStart(instr->BetterRightOperand());
    LInstruction* result = instr->representation().IsSmi() ?
        DefineAsRegister(new(zone()) LAddS(left, right)) :
        DefineAsRegister(new(zone()) LAddI(left, right));
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsExternal()) {
    // External-pointer + int32 offset; cannot overflow.
    ASSERT(instr->left()->representation().IsExternal());
    ASSERT(instr->right()->representation().IsInteger32());
    ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return DefineAsRegister(new(zone()) LAddE(left, right));
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::ADD, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::ADD, instr);
  }
}
873 
874 
// Lower a heap allocation. May call the runtime on slow path (deferred
// calling), so it carries a pointer map. A third temp is only needed when
// the new space must be pre-filled with filler values.
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
  info()->MarkAsDeferredCalling();
  LOperand* context = UseAny(instr->context());
  LOperand* size = UseRegisterOrConstant(instr->size());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LOperand* temp3 = instr->MustPrefillWithFiller() ? TempRegister() : NULL;
  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2, temp3);
  return AssignPointerMap(DefineAsRegister(result));
}
885 
886 
// Lower Function.prototype.apply with an arguments object: a call with all
// operands in fixed registers, and eager deopt allowed.
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
  LOperand* function = UseFixed(instr->function(), x1);
  LOperand* receiver = UseFixed(instr->receiver(), x0);
  LOperand* length = UseFixed(instr->length(), x2);
  LOperand* elements = UseFixed(instr->elements(), x3);
  LApplyArguments* result = new(zone()) LApplyArguments(function,
                                                        receiver,
                                                        length,
                                                        elements);
  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
898 
899 
// Lower access to the arguments backing area; needs a frame. A temp is only
// needed when the function was not inlined.
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
  return DefineAsRegister(new(zone()) LArgumentsElements(temp));
}
905 
906 
// Lower arguments.length; needs a frame to read the argument count.
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
  info()->MarkAsRequiresFrame();
  LOperand* value = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LArgumentsLength(value));
}
912 
913 
// There are no real uses of the arguments object.
// arguments.length and element access are supported directly on
// stack arguments, and any real arguments object use causes a bailout.
// So this value is never used.
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  return NULL;
}
921 
922 
// Lower a bitwise operation: smi/int32 inputs (which must truncate to
// int32) map to LBitS / LBitI; anything else goes through the generic
// tagged arithmetic call.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));

    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right =
        UseRegisterOrConstantAtStart(instr->BetterRightOperand());
    return instr->representation().IsSmi() ?
        DefineAsRegister(new(zone()) LBitS(left, right)) :
        DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    return DoArithmeticT(instr->op(), instr);
  }
}
939 
940 
// V8 expects a label to be generated for each basic block.
// This is used in some places like LAllocator::IsBlockBoundary
// in lithium-allocator.cc
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  return new(zone()) LLabel(instr->block());
}
947 
948 
949 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
950  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
951  LOperand* length = UseRegister(instr->length());
952  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
953 }
954 
955 
// Chooses the LBranch shape for an HBranch depending on how much static type
// information is available for the condition value. More precise information
// means fewer scratch registers and no deopt environment.
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  // Branches on a constant condition collapse to a plain goto.
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  HValue* value = instr->value();
  Representation r = value->representation();
  HType type = value->type();

  if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
    // These representations have simple checks that cannot deoptimize.
    return new(zone()) LBranch(UseRegister(value), NULL, NULL);
  } else {
    ASSERT(r.IsTagged());
    if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
        type.IsHeapNumber()) {
      // These types have simple checks that cannot deoptimize.
      return new(zone()) LBranch(UseRegister(value), NULL, NULL);
    }

    if (type.IsString()) {
      // This type cannot deoptimize, but needs a scratch register.
      return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
    }

    // General tagged case: the set of expected input types decides whether
    // map-based dispatch (and therefore two temps) is needed.
    ToBooleanStub::Types expected = instr->expected_input_types();
    bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
    LOperand* temp1 = needs_temps ? TempRegister() : NULL;
    LOperand* temp2 = needs_temps ? TempRegister() : NULL;

    if (expected.IsGeneric() || expected.IsEmpty()) {
      // The generic case cannot deoptimize because it already supports every
      // possible input type.
      ASSERT(needs_temps);
      return new(zone()) LBranch(UseRegister(value), temp1, temp2);
    } else {
      // Only a subset of types is expected; any other input deopts, so the
      // branch needs an environment.
      return AssignEnvironment(
          new(zone()) LBranch(UseRegister(value), temp1, temp2));
    }
  }
}
996 
997 
998 LInstruction* LChunkBuilder::DoCallJSFunction(
999  HCallJSFunction* instr) {
1000  LOperand* function = UseFixed(instr->function(), x1);
1001 
1002  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1003 
1004  return MarkAsCall(DefineFixed(result, x0), instr);
1005 }
1006 
1007 
1008 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1009  HCallWithDescriptor* instr) {
1010  const CallInterfaceDescriptor* descriptor = instr->descriptor();
1011 
1012  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1013  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1014  ops.Add(target, zone());
1015  for (int i = 1; i < instr->OperandCount(); i++) {
1016  LOperand* op = UseFixed(instr->OperandAt(i),
1017  descriptor->GetParameterRegister(i - 1));
1018  ops.Add(op, zone());
1019  }
1020 
1021  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
1022  ops,
1023  zone());
1024  return MarkAsCall(DefineFixed(result, x0), instr);
1025 }
1026 
1027 
1028 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1029  LOperand* context = UseFixed(instr->context(), cp);
1030  LOperand* function = UseFixed(instr->function(), x1);
1031  LCallFunction* call = new(zone()) LCallFunction(context, function);
1032  return MarkAsCall(DefineFixed(call, x0), instr);
1033 }
1034 
1035 
1036 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1037  LOperand* context = UseFixed(instr->context(), cp);
1038  // The call to CallConstructStub will expect the constructor to be in x1.
1039  LOperand* constructor = UseFixed(instr->constructor(), x1);
1040  LCallNew* result = new(zone()) LCallNew(context, constructor);
1041  return MarkAsCall(DefineFixed(result, x0), instr);
1042 }
1043 
1044 
1045 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1046  LOperand* context = UseFixed(instr->context(), cp);
1047  // The call to ArrayConstructCode will expect the constructor to be in x1.
1048  LOperand* constructor = UseFixed(instr->constructor(), x1);
1049  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1050  return MarkAsCall(DefineFixed(result, x0), instr);
1051 }
1052 
1053 
1054 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1055  LOperand* context = UseFixed(instr->context(), cp);
1056  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);
1057 }
1058 
1059 
1060 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
1061  LOperand* context = UseFixed(instr->context(), cp);
1062  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);
1063 }
1064 
1065 
1066 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
1067  instr->ReplayEnvironment(current_block_->last_environment());
1068 
1069  // There are no real uses of a captured object.
1070  return NULL;
1071 }
1072 
1073 
1074 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1075  Representation from = instr->from();
1076  Representation to = instr->to();
1077 
1078  if (from.IsSmi()) {
1079  if (to.IsTagged()) {
1080  LOperand* value = UseRegister(instr->value());
1081  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1082  }
1083  from = Representation::Tagged();
1084  }
1085 
1086  if (from.IsTagged()) {
1087  if (to.IsDouble()) {
1088  LOperand* value = UseRegister(instr->value());
1089  LOperand* temp = TempRegister();
1090  LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
1091  return AssignEnvironment(DefineAsRegister(res));
1092  } else if (to.IsSmi()) {
1093  LOperand* value = UseRegister(instr->value());
1094  if (instr->value()->type().IsSmi()) {
1095  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1096  }
1097  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1098  } else {
1099  ASSERT(to.IsInteger32());
1100  LInstruction* res = NULL;
1101 
1102  if (instr->value()->type().IsSmi() ||
1103  instr->value()->representation().IsSmi()) {
1104  LOperand* value = UseRegisterAtStart(instr->value());
1105  res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1106  } else {
1107  LOperand* value = UseRegister(instr->value());
1108  LOperand* temp1 = TempRegister();
1109  LOperand* temp2 = instr->CanTruncateToInt32() ? NULL : FixedTemp(d24);
1110  res = DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
1111  res = AssignEnvironment(res);
1112  }
1113 
1114  return res;
1115  }
1116  } else if (from.IsDouble()) {
1117  if (to.IsTagged()) {
1118  info()->MarkAsDeferredCalling();
1119  LOperand* value = UseRegister(instr->value());
1120  LOperand* temp1 = TempRegister();
1121  LOperand* temp2 = TempRegister();
1122 
1123  LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1124  return AssignPointerMap(DefineAsRegister(result));
1125  } else {
1126  ASSERT(to.IsSmi() || to.IsInteger32());
1127  LOperand* value = UseRegister(instr->value());
1128 
1129  if (instr->CanTruncateToInt32()) {
1130  LTruncateDoubleToIntOrSmi* result =
1131  new(zone()) LTruncateDoubleToIntOrSmi(value);
1132  return DefineAsRegister(result);
1133  } else {
1134  LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
1135  return AssignEnvironment(DefineAsRegister(result));
1136  }
1137  }
1138  } else if (from.IsInteger32()) {
1139  info()->MarkAsDeferredCalling();
1140  if (to.IsTagged()) {
1141  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1142  LOperand* value = UseRegister(instr->value());
1143  LNumberTagU* result = new(zone()) LNumberTagU(value,
1144  TempRegister(),
1145  TempRegister());
1146  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1147  } else {
1149  (kMaxInt == Smi::kMaxValue));
1150  LOperand* value = UseRegisterAtStart(instr->value());
1151  return DefineAsRegister(new(zone()) LSmiTag(value));
1152  }
1153  } else if (to.IsSmi()) {
1154  LOperand* value = UseRegisterAtStart(instr->value());
1155  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1156  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1157  result = AssignEnvironment(result);
1158  }
1159  return result;
1160  } else {
1161  ASSERT(to.IsDouble());
1162  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1163  return DefineAsRegister(
1164  new(zone()) LUint32ToDouble(UseRegisterAtStart(instr->value())));
1165  } else {
1166  return DefineAsRegister(
1167  new(zone()) LInteger32ToDouble(UseRegisterAtStart(instr->value())));
1168  }
1169  }
1170  }
1171 
1172  UNREACHABLE();
1173  return NULL;
1174 }
1175 
1176 
1177 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1178  LOperand* value = UseRegisterAtStart(instr->value());
1179  return AssignEnvironment(new(zone()) LCheckValue(value));
1180 }
1181 
1182 
1183 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1184  LOperand* value = UseRegisterAtStart(instr->value());
1185  LOperand* temp = TempRegister();
1186  LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
1187  return AssignEnvironment(result);
1188 }
1189 
1190 
1191 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1192  if (instr->CanOmitMapChecks()) {
1193  // LCheckMaps does nothing in this case.
1194  return new(zone()) LCheckMaps(NULL);
1195  } else {
1196  LOperand* value = UseRegisterAtStart(instr->value());
1197  LOperand* temp = TempRegister();
1198 
1199  if (instr->has_migration_target()) {
1200  info()->MarkAsDeferredCalling();
1201  LInstruction* result = new(zone()) LCheckMaps(value, temp);
1202  return AssignPointerMap(AssignEnvironment(result));
1203  } else {
1204  return AssignEnvironment(new(zone()) LCheckMaps(value, temp));
1205  }
1206  }
1207 }
1208 
1209 
1210 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1211  LOperand* value = UseRegisterAtStart(instr->value());
1212  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1213 }
1214 
1215 
1216 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1217  LOperand* value = UseRegisterAtStart(instr->value());
1218  return AssignEnvironment(new(zone()) LCheckSmi(value));
1219 }
1220 
1221 
1222 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1223  HValue* value = instr->value();
1224  Representation input_rep = value->representation();
1225  LOperand* reg = UseRegister(value);
1226  if (input_rep.IsDouble()) {
1227  return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1228  } else if (input_rep.IsInteger32()) {
1229  return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1230  } else {
1231  ASSERT(input_rep.IsSmiOrTagged());
1232  return AssignEnvironment(
1233  DefineAsRegister(new(zone()) LClampTToUint8(reg,
1234  TempRegister(),
1235  FixedTemp(d24))));
1236  }
1237 }
1238 
1239 
1240 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1241  HClassOfTestAndBranch* instr) {
1242  ASSERT(instr->value()->representation().IsTagged());
1243  LOperand* value = UseRegisterAtStart(instr->value());
1244  return new(zone()) LClassOfTestAndBranch(value,
1245  TempRegister(),
1246  TempRegister());
1247 }
1248 
1249 
1250 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1251  HCompareNumericAndBranch* instr) {
1252  Representation r = instr->representation();
1253 
1254  if (r.IsSmiOrInteger32()) {
1255  ASSERT(instr->left()->representation().Equals(r));
1256  ASSERT(instr->right()->representation().Equals(r));
1257  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1258  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1259  return new(zone()) LCompareNumericAndBranch(left, right);
1260  } else {
1261  ASSERT(r.IsDouble());
1262  ASSERT(instr->left()->representation().IsDouble());
1263  ASSERT(instr->right()->representation().IsDouble());
1264  // TODO(all): In fact the only case that we can handle more efficiently is
1265  // when one of the operand is the constant 0. Currently the MacroAssembler
1266  // will be able to cope with any constant by loading it into an internal
1267  // scratch register. This means that if the constant is used more that once,
1268  // it will be loaded multiple times. Unfortunatly crankshaft already
1269  // duplicates constant loads, but we should modify the code below once this
1270  // issue has been addressed in crankshaft.
1271  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1272  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1273  return new(zone()) LCompareNumericAndBranch(left, right);
1274  }
1275 }
1276 
1277 
1278 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1279  ASSERT(instr->left()->representation().IsTagged());
1280  ASSERT(instr->right()->representation().IsTagged());
1281  LOperand* context = UseFixed(instr->context(), cp);
1282  LOperand* left = UseFixed(instr->left(), x1);
1283  LOperand* right = UseFixed(instr->right(), x0);
1284  LCmpT* result = new(zone()) LCmpT(context, left, right);
1285  return MarkAsCall(DefineFixed(result, x0), instr);
1286 }
1287 
1288 
1289 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1290  HCompareHoleAndBranch* instr) {
1291  LOperand* value = UseRegister(instr->value());
1292  if (instr->representation().IsTagged()) {
1293  return new(zone()) LCmpHoleAndBranchT(value);
1294  } else {
1295  LOperand* temp = TempRegister();
1296  return new(zone()) LCmpHoleAndBranchD(value, temp);
1297  }
1298 }
1299 
1300 
1301 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1302  HCompareObjectEqAndBranch* instr) {
1303  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1304  if (goto_instr != NULL) return goto_instr;
1305 
1306  LOperand* left = UseRegisterAtStart(instr->left());
1307  LOperand* right = UseRegisterAtStart(instr->right());
1308  return new(zone()) LCmpObjectEqAndBranch(left, right);
1309 }
1310 
1311 
1312 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1313  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1314  if (goto_instr != NULL) return goto_instr;
1315 
1316  ASSERT(instr->value()->representation().IsTagged());
1317  LOperand* value = UseRegisterAtStart(instr->value());
1318  LOperand* temp = TempRegister();
1319  return new(zone()) LCmpMapAndBranch(value, temp);
1320 }
1321 
1322 
1323 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1324  Representation r = instr->representation();
1325  if (r.IsSmi()) {
1326  return DefineAsRegister(new(zone()) LConstantS);
1327  } else if (r.IsInteger32()) {
1328  return DefineAsRegister(new(zone()) LConstantI);
1329  } else if (r.IsDouble()) {
1330  return DefineAsRegister(new(zone()) LConstantD);
1331  } else if (r.IsExternal()) {
1332  return DefineAsRegister(new(zone()) LConstantE);
1333  } else if (r.IsTagged()) {
1334  return DefineAsRegister(new(zone()) LConstantT);
1335  } else {
1336  UNREACHABLE();
1337  return NULL;
1338  }
1339 }
1340 
1341 
1342 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1343  if (instr->HasNoUses()) return NULL;
1344 
1345  if (info()->IsStub()) {
1346  return DefineFixed(new(zone()) LContext, cp);
1347  }
1348 
1349  return DefineAsRegister(new(zone()) LContext);
1350 }
1351 
1352 
1353 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1354  LOperand* object = UseFixed(instr->value(), x0);
1355  LDateField* result = new(zone()) LDateField(object, instr->index());
1356  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
1357 }
1358 
1359 
// Emits a debugger breakpoint instruction.
LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
  return new(zone()) LDebugBreak();
}
1363 
1364 
1365 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1366  LOperand* context = UseFixed(instr->context(), cp);
1367  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1368 }
1369 
1370 
// Unconditional deoptimization; the environment describes where to resume.
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}
1374 
1375 
// Integer division by a compile-time power-of-two divisor.
LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
          dividend, divisor));
  // An environment is needed when the division can fail:
  //  - a negative divisor can produce -0,
  //  - kMinInt / -1 overflows,
  //  - a non-exact result deopts unless all uses truncate to int32.
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
      (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
       divisor != 1 && divisor != -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1392 
1393 
// Integer division by an arbitrary compile-time constant (via
// multiply-by-reciprocal); a temp is needed for the exactness check unless
// all uses truncate.
LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
      ? NULL : TempRegister();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
          dividend, divisor, temp));
  // Deopt cases: division by zero, -0 with a negative divisor, or a
  // non-exact result that is not truncated.
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1411 
1412 
1413 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1414  ASSERT(instr->representation().IsSmiOrInteger32());
1415  ASSERT(instr->left()->representation().Equals(instr->representation()));
1416  ASSERT(instr->right()->representation().Equals(instr->representation()));
1417  LOperand* dividend = UseRegister(instr->left());
1418  LOperand* divisor = UseRegister(instr->right());
1419  LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1420  ? NULL : TempRegister();
1421  LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
1422  return AssignEnvironment(DefineAsRegister(div));
1423 }
1424 
1425 
1426 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1427  if (instr->representation().IsSmiOrInteger32()) {
1428  if (instr->RightIsPowerOf2()) {
1429  return DoDivByPowerOf2I(instr);
1430  } else if (instr->right()->IsConstant()) {
1431  return DoDivByConstI(instr);
1432  } else {
1433  return DoDivI(instr);
1434  }
1435  } else if (instr->representation().IsDouble()) {
1436  return DoArithmeticD(Token::DIV, instr);
1437  } else {
1438  return DoArithmeticT(Token::DIV, instr);
1439  }
1440 }
1441 
1442 
1443 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
1444  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
1445 }
1446 
1447 
// Builds the inner HEnvironment for an inlined call and installs it on the
// current block. Emits no Lithium code.
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->inlining_kind());
  // Only replay binding of arguments object if it wasn't removed from graph.
  if ((instr->arguments_var() != NULL) &&
      instr->arguments_object()->IsLinked()) {
    inner->Bind(instr->arguments_var(), instr->arguments_object());
  }
  inner->set_entry(instr);
  current_block_->UpdateEnvironment(inner);
  // Record the closure so it can be referenced from deopt data.
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
1466 
1467 
// Environment markers are removed before Lithium building, so reaching this
// handler indicates a pipeline bug.
LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
  UNREACHABLE();
  return NULL;
}
1472 
1473 
LInstruction* LChunkBuilder::DoForceRepresentation(
    HForceRepresentation* instr) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen, so this is never reached.
  UNREACHABLE();
  return NULL;
}
1481 
1482 
1483 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1484  LOperand* context = UseFixed(instr->context(), cp);
1485  return MarkAsCall(
1486  DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
1487 }
1488 
1489 
1490 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1491  HGetCachedArrayIndex* instr) {
1492  ASSERT(instr->value()->representation().IsTagged());
1493  LOperand* value = UseRegisterAtStart(instr->value());
1494  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1495 }
1496 
1497 
// Unconditional jump to the block's single successor.
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
  return new(zone()) LGoto(instr->FirstSuccessor());
}
1501 
1502 
1503 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1504  HHasCachedArrayIndexAndBranch* instr) {
1505  ASSERT(instr->value()->representation().IsTagged());
1506  return new(zone()) LHasCachedArrayIndexAndBranch(
1507  UseRegisterAtStart(instr->value()), TempRegister());
1508 }
1509 
1510 
1511 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1512  HHasInstanceTypeAndBranch* instr) {
1513  ASSERT(instr->value()->representation().IsTagged());
1514  LOperand* value = UseRegisterAtStart(instr->value());
1515  return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
1516 }
1517 
1518 
1519 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1520  HInnerAllocatedObject* instr) {
1521  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1522  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1523  return DefineAsRegister(
1524  new(zone()) LInnerAllocatedObject(base_object, offset));
1525 }
1526 
1527 
1528 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1529  LOperand* context = UseFixed(instr->context(), cp);
1530  LInstanceOf* result = new(zone()) LInstanceOf(
1531  context,
1532  UseFixed(instr->left(), InstanceofStub::left()),
1533  UseFixed(instr->right(), InstanceofStub::right()));
1534  return MarkAsCall(DefineFixed(result, x0), instr);
1535 }
1536 
1537 
1538 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1539  HInstanceOfKnownGlobal* instr) {
1540  LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
1541  UseFixed(instr->context(), cp),
1542  UseFixed(instr->left(), InstanceofStub::left()));
1543  return MarkAsCall(DefineFixed(result, x0), instr);
1544 }
1545 
1546 
1547 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1548  LOperand* context = UseFixed(instr->context(), cp);
1549  // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
1550  LOperand* function = UseFixed(instr->function(), x1);
1551  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1552  return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1553 }
1554 
1555 
1556 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
1557  HIsConstructCallAndBranch* instr) {
1558  return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
1559 }
1560 
1561 
1562 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1563  HCompareMinusZeroAndBranch* instr) {
1564  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1565  if (goto_instr != NULL) return goto_instr;
1566  LOperand* value = UseRegister(instr->value());
1567  LOperand* scratch = TempRegister();
1568  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1569 }
1570 
1571 
1572 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1573  ASSERT(instr->value()->representation().IsTagged());
1574  LOperand* value = UseRegisterAtStart(instr->value());
1575  LOperand* temp1 = TempRegister();
1576  LOperand* temp2 = TempRegister();
1577  return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
1578 }
1579 
1580 
1581 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1582  ASSERT(instr->value()->representation().IsTagged());
1583  LOperand* value = UseRegisterAtStart(instr->value());
1584  LOperand* temp = TempRegister();
1585  return new(zone()) LIsStringAndBranch(value, temp);
1586 }
1587 
1588 
1589 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1590  ASSERT(instr->value()->representation().IsTagged());
1591  return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
1592 }
1593 
1594 
1595 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1596  HIsUndetectableAndBranch* instr) {
1597  ASSERT(instr->value()->representation().IsTagged());
1598  LOperand* value = UseRegisterAtStart(instr->value());
1599  return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1600 }
1601 
1602 
// Restores the outer environment when leaving an inlined function. If the
// inlined call pushed its arguments on the stack, emits an LDrop to pop
// them; otherwise emits nothing.
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;
  HEnvironment* env = current_block_->last_environment();

  if (env->entry()->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    ASSERT(instr->argument_delta() == -argument_count);
  }

  HEnvironment* outer =
      current_block_->last_environment()->DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}
1619 
1620 
1621 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1622  LOperand* context = UseRegisterAtStart(instr->value());
1623  LInstruction* result =
1624  DefineAsRegister(new(zone()) LLoadContextSlot(context));
1625  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1626 }
1627 
1628 
1629 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1630  HLoadFunctionPrototype* instr) {
1631  LOperand* function = UseRegister(instr->function());
1632  LOperand* temp = TempRegister();
1633  return AssignEnvironment(DefineAsRegister(
1634  new(zone()) LLoadFunctionPrototype(function, temp)));
1635 }
1636 
1637 
1638 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1639  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1640  return instr->RequiresHoleCheck()
1641  ? AssignEnvironment(DefineAsRegister(result))
1642  : DefineAsRegister(result);
1643 }
1644 
1645 
1646 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1647  LOperand* context = UseFixed(instr->context(), cp);
1648  LOperand* global_object = UseFixed(instr->global_object(), x0);
1649  LLoadGlobalGeneric* result =
1650  new(zone()) LLoadGlobalGeneric(context, global_object);
1651  return MarkAsCall(DefineFixed(result, x0), instr);
1652 }
1653 
1654 
// Lowers a keyed (indexed) load. Three cases: fixed double array, fixed
// smi/tagged/int32 array, and external/typed-array backing stores. Hole
// checks — and uint32 loads that may not fit an int32 — need a deopt
// environment.
LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
  ASSERT(instr->key()->representation().IsSmiOrInteger32());
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* elements = UseRegister(instr->elements());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());

  if (!instr->is_typed_elements()) {
    if (instr->representation().IsDouble()) {
      // A scratch register is needed unless the key is constant and no hole
      // check is required.
      LOperand* temp = (!instr->key()->IsConstant() ||
                        instr->RequiresHoleCheck())
             ? TempRegister()
             : NULL;

      LLoadKeyedFixedDouble* result =
          new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
      return instr->RequiresHoleCheck()
          ? AssignEnvironment(DefineAsRegister(result))
          : DefineAsRegister(result);
    } else {
      ASSERT(instr->representation().IsSmiOrTagged() ||
             instr->representation().IsInteger32());
      // Non-constant keys need a scratch register for address computation.
      LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
      LLoadKeyedFixed* result =
          new(zone()) LLoadKeyedFixed(elements, key, temp);
      return instr->RequiresHoleCheck()
          ? AssignEnvironment(DefineAsRegister(result))
          : DefineAsRegister(result);
    }
  } else {
    ASSERT((instr->representation().IsInteger32() &&
            !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
           (instr->representation().IsDouble() &&
            IsDoubleOrFloatElementsKind(instr->elements_kind())));

    LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
    LLoadKeyedExternal* result =
        new(zone()) LLoadKeyedExternal(elements, key, temp);
    // An unsigned int array load might overflow and cause a deopt. Make sure it
    // has an environment.
    if (instr->RequiresHoleCheck() ||
        elements_kind == EXTERNAL_UINT32_ELEMENTS ||
        elements_kind == UINT32_ELEMENTS) {
      return AssignEnvironment(DefineAsRegister(result));
    } else {
      return DefineAsRegister(result);
    }
  }
}
1703 
1704 
1705 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1706  LOperand* context = UseFixed(instr->context(), cp);
1707  LOperand* object = UseFixed(instr->object(), x1);
1708  LOperand* key = UseFixed(instr->key(), x0);
1709 
1710  LInstruction* result =
1711  DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), x0);
1712  return MarkAsCall(result, instr);
1713 }
1714 
1715 
1716 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1717  LOperand* object = UseRegisterAtStart(instr->object());
1718  return DefineAsRegister(new(zone()) LLoadNamedField(object));
1719 }
1720 
1721 
1722 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1723  LOperand* context = UseFixed(instr->context(), cp);
1724  LOperand* object = UseFixed(instr->object(), x0);
1725  LInstruction* result =
1726  DefineFixed(new(zone()) LLoadNamedGeneric(context, object), x0);
1727  return MarkAsCall(result, instr);
1728 }
1729 
1730 
// Loads a value from the root array.
LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
  return DefineAsRegister(new(zone()) LLoadRoot);
}
1734 
1735 
1736 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1737  LOperand* map = UseRegisterAtStart(instr->value());
1738  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1739 }
1740 
1741 
// Flooring integer division by a compile-time power-of-two divisor.
LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
          dividend, divisor));
  // Deopts when the result could be -0 (negative divisor) or when
  // kMinInt / -1 would overflow.
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1756 
1757 
// Flooring integer division by an arbitrary compile-time constant.
LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  // No temp is needed when the dividend's sign is known to match the
  // divisor's, since the truncating result then equals the flooring one.
  LOperand* temp =
      ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
       (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
      NULL : TempRegister();
  LInstruction* result = DefineAsRegister(
      new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
  // Deopts on division by zero or when the result could be -0.
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1776 
1777 
1778 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1779  LOperand* dividend = UseRegister(instr->left());
1780  LOperand* divisor = UseRegister(instr->right());
1781  LOperand* remainder = TempRegister();
1782  LInstruction* result =
1783  DefineAsRegister(new(zone()) LFlooringDivI(dividend, divisor, remainder));
1784  return AssignEnvironment(result);
1785 }
1786 
1787 
1788 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1789  if (instr->RightIsPowerOf2()) {
1790  return DoFlooringDivByPowerOf2I(instr);
1791  } else if (instr->right()->IsConstant()) {
1792  return DoFlooringDivByConstI(instr);
1793  } else {
1794  return DoFlooringDivI(instr);
1795  }
1796 }
1797 
1798 
1799 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1800  LOperand* left = NULL;
1801  LOperand* right = NULL;
1802  if (instr->representation().IsSmiOrInteger32()) {
1803  ASSERT(instr->left()->representation().Equals(instr->representation()));
1804  ASSERT(instr->right()->representation().Equals(instr->representation()));
1805  left = UseRegisterAtStart(instr->BetterLeftOperand());
1806  right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
1807  } else {
1808  ASSERT(instr->representation().IsDouble());
1809  ASSERT(instr->left()->representation().IsDouble());
1810  ASSERT(instr->right()->representation().IsDouble());
1811  left = UseRegisterAtStart(instr->left());
1812  right = UseRegisterAtStart(instr->right());
1813  }
1814  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1815 }
1816 
1817 
1818 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1819  ASSERT(instr->representation().IsInteger32());
1820  ASSERT(instr->left()->representation().Equals(instr->representation()));
1821  ASSERT(instr->right()->representation().Equals(instr->representation()));
1822  LOperand* dividend = UseRegisterAtStart(instr->left());
1823  int32_t divisor = instr->right()->GetInteger32Constant();
1824  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1825  dividend, divisor));
1826  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1827  result = AssignEnvironment(result);
1828  }
1829  return result;
1830 }
1831 
1832 
1833 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1834  ASSERT(instr->representation().IsInteger32());
1835  ASSERT(instr->left()->representation().Equals(instr->representation()));
1836  ASSERT(instr->right()->representation().Equals(instr->representation()));
1837  LOperand* dividend = UseRegister(instr->left());
1838  int32_t divisor = instr->right()->GetInteger32Constant();
1839  LOperand* temp = TempRegister();
1840  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1841  dividend, divisor, temp));
1842  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1843  result = AssignEnvironment(result);
1844  }
1845  return result;
1846 }
1847 
1848 
1849 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1850  ASSERT(instr->representation().IsSmiOrInteger32());
1851  ASSERT(instr->left()->representation().Equals(instr->representation()));
1852  ASSERT(instr->right()->representation().Equals(instr->representation()));
1853  LOperand* dividend = UseRegister(instr->left());
1854  LOperand* divisor = UseRegister(instr->right());
1855  LInstruction* result = DefineAsRegister(new(zone()) LModI(dividend, divisor));
1856  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1857  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1858  result = AssignEnvironment(result);
1859  }
1860  return result;
1861 }
1862 
1863 
1864 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1865  if (instr->representation().IsSmiOrInteger32()) {
1866  if (instr->RightIsPowerOf2()) {
1867  return DoModByPowerOf2I(instr);
1868  } else if (instr->right()->IsConstant()) {
1869  return DoModByConstI(instr);
1870  } else {
1871  return DoModI(instr);
1872  }
1873  } else if (instr->representation().IsDouble()) {
1874  return DoArithmeticD(Token::MOD, instr);
1875  } else {
1876  return DoArithmeticT(Token::MOD, instr);
1877  }
1878 }
1879 
1880 
// Lowers a multiplication. Integer/smi multiplies by suitable constants use
// LMulConstIS; everything else uses a register-register LMulI/LMulS. Double
// and tagged multiplies go through the generic arithmetic helpers.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));

    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
    bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
    // Either condition requires a deopt point on the instruction.
    bool needs_environment = can_overflow || bailout_on_minus_zero;

    // BetterRightOperand prefers a constant on the right, so most_const is
    // the operand to test for the constant fast path.
    HValue* least_const = instr->BetterLeftOperand();
    HValue* most_const = instr->BetterRightOperand();

    LOperand* left;

    // LMulConstI can handle a subset of constants:
    //  With support for overflow detection:
    //    -1, 0, 1, 2
    //    2^n, -(2^n)
    //  Without support for overflow detection:
    //    2^n + 1, -(2^n - 1)
    if (most_const->IsConstant()) {
      int32_t constant = HConstant::cast(most_const)->Integer32Value();
      bool small_constant = (constant >= -1) && (constant <= 2);
      // Exclude the extremes of the int32 range: Abs() of those values is
      // not representable, so the classification below would be wrong.
      bool end_range_constant = (constant <= -kMaxInt) || (constant == kMaxInt);
      int32_t constant_abs = Abs(constant);

      if (!end_range_constant &&
          (small_constant ||
           (IsPowerOf2(constant_abs)) ||
           (!can_overflow && (IsPowerOf2(constant_abs + 1) ||
                              IsPowerOf2(constant_abs - 1))))) {
        LConstantOperand* right = UseConstant(most_const);
        // NOTE(review): for the power-of-two (non-small) case the input must
        // not be clobbered at start, presumably because the generated code
        // reads it after writing the result — confirm in LCodeGen.
        bool need_register = IsPowerOf2(constant_abs) && !small_constant;
        left = need_register ? UseRegister(least_const)
                             : UseRegisterAtStart(least_const);
        LMulConstIS* mul = new(zone()) LMulConstIS(left, right);
        if (needs_environment) AssignEnvironment(mul);
        return DefineAsRegister(mul);
      }
    }

    left = UseRegisterAtStart(least_const);
    // LMulI/S can handle all cases, but it requires that a register is
    // allocated for the second operand.
    LInstruction* result;
    if (instr->representation().IsSmi()) {
      LOperand* right = UseRegisterAtStart(most_const);
      result = DefineAsRegister(new(zone()) LMulS(left, right));
    } else {
      LOperand* right = UseRegisterAtStart(most_const);
      result = DefineAsRegister(new(zone()) LMulI(left, right));
    }
    if (needs_environment) AssignEnvironment(result);
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);
  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}
1941 
1942 
// Lowers an on-stack-replacement entry point. Marks the allocator so it
// knows this chunk has an OSR entry, and records the AST id on the current
// environment so the deopt data lines up with the unoptimized frame.
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  // No arguments may be in the middle of being pushed at an OSR entry.
  ASSERT(argument_count_ == 0);
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
1949 
1950 
1951 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1952  LParameter* result = new(zone()) LParameter;
1953  if (instr->kind() == HParameter::STACK_PARAMETER) {
1954  int spill_index = chunk_->GetParameterStackSlot(instr->index());
1955  return DefineAsSpilled(result, spill_index);
1956  } else {
1957  ASSERT(info()->IsStub());
1958  CodeStubInterfaceDescriptor* descriptor =
1959  info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
1960  int index = static_cast<int>(instr->index());
1961  Register reg = descriptor->GetParameterRegister(index);
1962  return DefineFixed(result, reg);
1963  }
1964 }
1965 
1966 
1967 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1968  ASSERT(instr->representation().IsDouble());
1969  // We call a C function for double power. It can't trigger a GC.
1970  // We need to use fixed result register for the call.
1971  Representation exponent_type = instr->right()->representation();
1972  ASSERT(instr->left()->representation().IsDouble());
1973  LOperand* left = UseFixedDouble(instr->left(), d0);
1974  LOperand* right = exponent_type.IsInteger32()
1975  ? UseFixed(instr->right(), x12)
1976  : exponent_type.IsDouble()
1977  ? UseFixedDouble(instr->right(), d1)
1978  : UseFixed(instr->right(), x11);
1979  LPower* result = new(zone()) LPower(left, right);
1980  return MarkAsCall(DefineFixedDouble(result, d0),
1981  instr,
1982  CAN_DEOPTIMIZE_EAGERLY);
1983 }
1984 
1985 
1986 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1987  LOperand* argument = UseRegister(instr->argument());
1988  return new(zone()) LPushArgument(argument);
1989 }
1990 
1991 
1992 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1993  LOperand* context = UseFixed(instr->context(), cp);
1994  return MarkAsCall(
1995  DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
1996 }
1997 
1998 
1999 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2000  HValue* value = instr->value();
2001  ASSERT(value->representation().IsDouble());
2002  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2003 }
2004 
2005 
2006 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2007  LOperand* lo = UseRegister(instr->lo());
2008  LOperand* hi = UseRegister(instr->hi());
2009  LOperand* temp = TempRegister();
2010  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo, temp));
2011 }
2012 
2013 
2014 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2015  LOperand* context = info()->IsStub()
2016  ? UseFixed(instr->context(), cp)
2017  : NULL;
2018  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2019  return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
2020  parameter_count);
2021 }
2022 
2023 
2024 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
2025  LOperand* string = UseRegisterAtStart(instr->string());
2026  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2027  LOperand* temp = TempRegister();
2028  LSeqStringGetChar* result =
2029  new(zone()) LSeqStringGetChar(string, index, temp);
2030  return DefineAsRegister(result);
2031 }
2032 
2033 
2034 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
2035  LOperand* string = UseRegister(instr->string());
2036  LOperand* index = FLAG_debug_code
2037  ? UseRegister(instr->index())
2038  : UseRegisterOrConstant(instr->index());
2039  LOperand* value = UseRegister(instr->value());
2040  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
2041  LOperand* temp = TempRegister();
2042  LSeqStringSetChar* result =
2043  new(zone()) LSeqStringSetChar(context, string, index, value, temp);
2044  return DefineAsRegister(result);
2045 }
2046 
2047 
// Common lowering for the four shift operations (ROR, SAR, SHL, SHR).
// Tagged operands fall back to the generic binary-op call; integer and smi
// operands are lowered to LShiftI/LShiftS.
LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsTagged()) {
    return DoArithmeticT(op, instr);
  }

  ASSERT(instr->representation().IsInteger32() ||
         instr->representation().IsSmi());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));

  LOperand* left = instr->representation().IsSmi()
      ? UseRegister(instr->left())
      : UseRegisterAtStart(instr->left());

  HValue* right_value = instr->right();
  LOperand* right = NULL;
  LOperand* temp = NULL;
  int constant_value = 0;
  if (right_value->IsConstant()) {
    right = UseConstant(right_value);
    HConstant* constant = HConstant::cast(right_value);
    // Only the low five bits of the shift amount are significant.
    constant_value = constant->Integer32Value() & 0x1f;
  } else {
    right = UseRegisterAtStart(right_value);
    // A variable rotate needs a scratch register; the temp is only passed
    // on to LShiftS below.
    if (op == Token::ROR) {
      temp = TempRegister();
    }
  }

  // Shift operations can only deoptimize if we do a logical shift by 0 and the
  // result cannot be truncated to int32.
  bool does_deopt = false;
  if ((op == Token::SHR) && (constant_value == 0)) {
    if (FLAG_opt_safe_uint32_operations) {
      does_deopt = !instr->CheckFlag(HInstruction::kUint32);
    } else {
      does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
    }
  }

  LInstruction* result;
  if (instr->representation().IsInteger32()) {
    result = DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
  } else {
    ASSERT(instr->representation().IsSmi());
    result = DefineAsRegister(
        new(zone()) LShiftS(op, left, right, temp, does_deopt));
  }

  return does_deopt ? AssignEnvironment(result) : result;
}
2100 
2101 
// Rotate-right: delegates to the common shift lowering.
LInstruction* LChunkBuilder::DoRor(HRor* instr) {
  return DoShift(Token::ROR, instr);
}
2105 
2106 
// Arithmetic shift-right: delegates to the common shift lowering.
LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}
2110 
2111 
// Shift-left: delegates to the common shift lowering.
LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}
2115 
2116 
// Logical shift-right: delegates to the common shift lowering.
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}
2120 
2121 
// A simulate only updates the tracked environment; it emits no LInstruction.
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  instr->ReplayEnvironment(current_block_->last_environment());
  return NULL;
}
2126 
2127 
2128 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2129  if (instr->is_function_entry()) {
2130  LOperand* context = UseFixed(instr->context(), cp);
2131  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2132  } else {
2133  ASSERT(instr->is_backwards_branch());
2134  LOperand* context = UseAny(instr->context());
2135  return AssignEnvironment(
2136  AssignPointerMap(new(zone()) LStackCheck(context)));
2137  }
2138 }
2139 
2140 
2141 LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
2142  LOperand* function = UseRegister(instr->function());
2143  LOperand* code_object = UseRegisterAtStart(instr->code_object());
2144  LOperand* temp = TempRegister();
2145  return new(zone()) LStoreCodeEntry(function, code_object, temp);
2146 }
2147 
2148 
2149 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2150  LOperand* temp = TempRegister();
2151  LOperand* context;
2152  LOperand* value;
2153  if (instr->NeedsWriteBarrier()) {
2154  // TODO(all): Replace these constraints when RecordWriteStub has been
2155  // rewritten.
2156  context = UseRegisterAndClobber(instr->context());
2157  value = UseRegisterAndClobber(instr->value());
2158  } else {
2159  context = UseRegister(instr->context());
2160  value = UseRegister(instr->value());
2161  }
2162  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2163  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2164 }
2165 
2166 
2167 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2168  LOperand* value = UseRegister(instr->value());
2169  if (instr->RequiresHoleCheck()) {
2170  return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
2171  TempRegister(),
2172  TempRegister()));
2173  } else {
2174  return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
2175  }
2176 }
2177 
2178 
// Lowers a keyed store into typed-array/external, fixed-double, or fixed
// (tagged) elements. Operand constraints depend on whether a write barrier
// is needed.
LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
  LOperand* temp = NULL;
  LOperand* elements = NULL;
  LOperand* val = NULL;
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());

  if (!instr->is_typed_elements() &&
      instr->value()->representation().IsTagged() &&
      instr->NeedsWriteBarrier()) {
    // RecordWrite() will clobber all registers.
    elements = UseRegisterAndClobber(instr->elements());
    val = UseRegisterAndClobber(instr->value());
    temp = TempRegister();
  } else {
    elements = UseRegister(instr->elements());
    val = UseRegister(instr->value());
    // A variable key needs a temp; a constant key can be folded into the
    // addressing without one.
    temp = instr->key()->IsConstant() ? NULL : TempRegister();
  }

  if (instr->is_typed_elements()) {
    // The value representation must agree with the elements kind:
    // doubles for double/float kinds, int32 otherwise.
    ASSERT((instr->value()->representation().IsInteger32() &&
            !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
           (instr->value()->representation().IsDouble() &&
            IsDoubleOrFloatElementsKind(instr->elements_kind())));
    ASSERT((instr->is_fixed_typed_array() &&
            instr->elements()->representation().IsTagged()) ||
           (instr->is_external() &&
            instr->elements()->representation().IsExternal()));
    return new(zone()) LStoreKeyedExternal(elements, key, val, temp);

  } else if (instr->value()->representation().IsDouble()) {
    ASSERT(instr->elements()->representation().IsTagged());
    return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);

  } else {
    ASSERT(instr->elements()->representation().IsTagged());
    ASSERT(instr->value()->representation().IsSmiOrTagged() ||
           instr->value()->representation().IsInteger32());
    return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
  }
}
2220 
2221 
2222 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2223  LOperand* context = UseFixed(instr->context(), cp);
2224  LOperand* object = UseFixed(instr->object(), x2);
2225  LOperand* key = UseFixed(instr->key(), x1);
2226  LOperand* value = UseFixed(instr->value(), x0);
2227 
2228  ASSERT(instr->object()->representation().IsTagged());
2229  ASSERT(instr->key()->representation().IsTagged());
2230  ASSERT(instr->value()->representation().IsTagged());
2231 
2232  return MarkAsCall(
2233  new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
2234 }
2235 
2236 
// Lowers a named-field store. Temp requirements depend on whether a write
// barrier is needed for the value or for a map transition.
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  // TODO(jbramley): It might be beneficial to allow value to be a constant in
  // some cases. x64 makes use of this with FLAG_track_fields, for example.

  LOperand* object = UseRegister(instr->object());
  LOperand* value;
  LOperand* temp0 = NULL;
  LOperand* temp1 = NULL;

  if (instr->access().IsExternalMemory() ||
      instr->field_representation().IsDouble()) {
    // External or double stores: plain value register, no temps.
    value = UseRegister(instr->value());
  } else if (instr->NeedsWriteBarrier()) {
    // The value write barrier clobbers its value register and needs two
    // temps.
    value = UseRegisterAndClobber(instr->value());
    temp0 = TempRegister();
    temp1 = TempRegister();
  } else if (instr->NeedsWriteBarrierForMap()) {
    // The map write barrier leaves the value alone but also needs two temps.
    value = UseRegister(instr->value());
    temp0 = TempRegister();
    temp1 = TempRegister();
  } else {
    value = UseRegister(instr->value());
    temp0 = TempRegister();
  }

  LStoreNamedField* result =
      new(zone()) LStoreNamedField(object, value, temp0, temp1);
  // A heap-object field representation implies a run-time check (and
  // possible deopt) unless the value is statically known to be a heap
  // object.
  if (instr->field_representation().IsHeapObject() &&
      !instr->value()->type().IsHeapObject()) {
    return AssignEnvironment(result);
  }
  return result;
}
2270 
2271 
2272 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2273  LOperand* context = UseFixed(instr->context(), cp);
2274  LOperand* object = UseFixed(instr->object(), x1);
2275  LOperand* value = UseFixed(instr->value(), x0);
2276  LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
2277  return MarkAsCall(result, instr);
2278 }
2279 
2280 
2281 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2282  LOperand* context = UseFixed(instr->context(), cp);
2283  LOperand* left = UseFixed(instr->left(), x1);
2284  LOperand* right = UseFixed(instr->right(), x0);
2285 
2286  LStringAdd* result = new(zone()) LStringAdd(context, left, right);
2287  return MarkAsCall(DefineFixed(result, x0), instr);
2288 }
2289 
2290 
2291 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2292  LOperand* string = UseRegisterAndClobber(instr->string());
2293  LOperand* index = UseRegisterAndClobber(instr->index());
2294  LOperand* context = UseAny(instr->context());
2295  LStringCharCodeAt* result =
2296  new(zone()) LStringCharCodeAt(context, string, index);
2297  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2298 }
2299 
2300 
2301 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2302  LOperand* char_code = UseRegister(instr->value());
2303  LOperand* context = UseAny(instr->context());
2304  LStringCharFromCode* result =
2305  new(zone()) LStringCharFromCode(context, char_code);
2306  return AssignPointerMap(DefineAsRegister(result));
2307 }
2308 
2309 
2310 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
2311  HStringCompareAndBranch* instr) {
2312  ASSERT(instr->left()->representation().IsTagged());
2313  ASSERT(instr->right()->representation().IsTagged());
2314  LOperand* context = UseFixed(instr->context(), cp);
2315  LOperand* left = UseFixed(instr->left(), x1);
2316  LOperand* right = UseFixed(instr->right(), x0);
2317  LStringCompareAndBranch* result =
2318  new(zone()) LStringCompareAndBranch(context, left, right);
2319  return MarkAsCall(result, instr);
2320 }
2321 
2322 
2323 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2324  if (instr->representation().IsSmiOrInteger32()) {
2325  ASSERT(instr->left()->representation().Equals(instr->representation()));
2326  ASSERT(instr->right()->representation().Equals(instr->representation()));
2327  LOperand *left;
2328  if (instr->left()->IsConstant() &&
2329  (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2330  left = UseConstant(instr->left());
2331  } else {
2332  left = UseRegisterAtStart(instr->left());
2333  }
2334  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2335  LInstruction* result = instr->representation().IsSmi() ?
2336  DefineAsRegister(new(zone()) LSubS(left, right)) :
2337  DefineAsRegister(new(zone()) LSubI(left, right));
2338  if (instr->CheckFlag(HValue::kCanOverflow)) {
2339  result = AssignEnvironment(result);
2340  }
2341  return result;
2342  } else if (instr->representation().IsDouble()) {
2343  return DoArithmeticD(Token::SUB, instr);
2344  } else {
2345  return DoArithmeticT(Token::SUB, instr);
2346  }
2347 }
2348 
2349 
2350 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2351  if (instr->HasNoUses()) {
2352  return NULL;
2353  } else {
2354  return DefineAsRegister(new(zone()) LThisFunction);
2355  }
2356 }
2357 
2358 
2359 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2360  LOperand* object = UseFixed(instr->value(), x0);
2361  LToFastProperties* result = new(zone()) LToFastProperties(object);
2362  return MarkAsCall(DefineFixed(result, x0), instr);
2363 }
2364 
2365 
2366 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2367  HTransitionElementsKind* instr) {
2368  LOperand* object = UseRegister(instr->object());
2369  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2370  LTransitionElementsKind* result =
2371  new(zone()) LTransitionElementsKind(object, NULL,
2372  TempRegister(), TempRegister());
2373  return result;
2374  } else {
2375  LOperand* context = UseFixed(instr->context(), cp);
2376  LTransitionElementsKind* result =
2377  new(zone()) LTransitionElementsKind(object, context, TempRegister());
2378  return AssignPointerMap(result);
2379  }
2380 }
2381 
2382 
2383 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2384  HTrapAllocationMemento* instr) {
2385  LOperand* object = UseRegister(instr->object());
2386  LOperand* temp1 = TempRegister();
2387  LOperand* temp2 = TempRegister();
2388  LTrapAllocationMemento* result =
2389  new(zone()) LTrapAllocationMemento(object, temp1, temp2);
2390  return AssignEnvironment(result);
2391 }
2392 
2393 
2394 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2395  LOperand* context = UseFixed(instr->context(), cp);
2396  // TODO(jbramley): In ARM, this uses UseFixed to force the input to x0.
2397  // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
2398  // anyway, so the input doesn't have to be in x0. We might be able to improve
2399  // the ARM back-end a little by relaxing this restriction.
2400  LTypeof* result =
2401  new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
2402  return MarkAsCall(DefineFixed(result, x0), instr);
2403 }
2404 
2405 
2406 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2407  LInstruction* goto_instr = CheckElideControlInstruction(instr);
2408  if (goto_instr != NULL) return goto_instr;
2409 
2410  // We only need temp registers in some cases, but we can't dereference the
2411  // instr->type_literal() handle to test that here.
2412  LOperand* temp1 = TempRegister();
2413  LOperand* temp2 = TempRegister();
2414 
2415  return new(zone()) LTypeofIsAndBranch(
2416  UseRegister(instr->value()), temp1, temp2);
2417 }
2418 
2419 
// Lowers the unary math operations (abs, exp, floor, log, pow-half, round,
// sqrt, clz32). Each case documents its operand constraints and whether it
// can deoptimize.
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs: {
      Representation r = instr->representation();
      if (r.IsTagged()) {
        // The tagged case might need to allocate a HeapNumber for the result,
        // so it is handled by a separate LInstruction.
        LOperand* context = UseFixed(instr->context(), cp);
        LOperand* input = UseRegister(instr->value());
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LOperand* temp3 = TempRegister();
        LMathAbsTagged* result =
            new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      } else {
        LOperand* input = UseRegisterAtStart(instr->value());
        LMathAbs* result = new(zone()) LMathAbs(input);
        if (r.IsDouble()) {
          // The Double case can never fail so it doesn't need an environment.
          return DefineAsRegister(result);
        } else {
          ASSERT(r.IsInteger32() || r.IsSmi());
          // The Integer32 and Smi cases need an environment because they can
          // deoptimize on minimum representable number.
          return AssignEnvironment(DefineAsRegister(result));
        }
      }
    }
    case kMathExp: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegister(instr->value());
      // TODO(all): Implement TempFPRegister.
      LOperand* double_temp1 = FixedTemp(d24);  // This was chosen arbitrarily.
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();
      LOperand* temp3 = TempRegister();
      LMathExp* result = new(zone()) LMathExp(input, double_temp1,
                                              temp1, temp2, temp3);
      return DefineAsRegister(result);
    }
    case kMathFloor: {
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsDouble());
      // TODO(jbramley): ARM64 can easily handle a double argument with frintm,
      // but we're never asked for it here. At the moment, we fall back to the
      // runtime if the result doesn't fit, like the other architectures.
      LOperand* input = UseRegisterAtStart(instr->value());
      LMathFloor* result = new(zone()) LMathFloor(input);
      return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
    }
    case kMathLog: {
      // Lowered as a C call with the argument and result in d0.
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseFixedDouble(instr->value(), d0);
      LMathLog* result = new(zone()) LMathLog(input);
      return MarkAsCall(DefineFixedDouble(result, d0), instr);
    }
    case kMathPowHalf: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegister(instr->value());
      return DefineAsRegister(new(zone()) LMathPowHalf(input));
    }
    case kMathRound: {
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsDouble());
      // TODO(jbramley): As with kMathFloor, we can probably handle double
      // results fairly easily, but we are never asked for them.
      LOperand* input = UseRegister(instr->value());
      LOperand* temp = FixedTemp(d24);  // Chosen arbitrarily.
      LMathRound* result = new(zone()) LMathRound(input, temp);
      return AssignEnvironment(DefineAsRegister(result));
    }
    case kMathSqrt: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegisterAtStart(instr->value());
      return DefineAsRegister(new(zone()) LMathSqrt(input));
    }
    case kMathClz32: {
      // Count-leading-zeros on an int32 input; never deoptimizes.
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsInteger32());
      LOperand* input = UseRegisterAtStart(instr->value());
      return DefineAsRegister(new(zone()) LMathClz32(input));
    }
    default:
      UNREACHABLE();
      return NULL;
  }
}
2512 
2513 
// Lowers an OSR value whose content is unknown at compile time. The value
// is defined as a spill slot matching its location in the unoptimized frame.
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  // Use an index that corresponds to the location in the unoptimized frame,
  // which the optimized frame will subsume.
  int env_index = instr->index();
  int spill_index = 0;
  if (instr->environment()->is_parameter_index(env_index)) {
    // Parameters map directly to their caller-frame stack slots.
    spill_index = chunk_->GetParameterStackSlot(env_index);
  } else {
    // Locals are numbered relative to the first local in the environment.
    spill_index = env_index - instr->environment()->first_local_index();
    if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
      // Too many slots to encode: abort optimization rather than emit a
      // broken index.
      Abort(kTooManySpillSlotsNeededForOSR);
      spill_index = 0;
    }
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
2530 
2531 
// HUseConst produces no code; nothing to lower.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}
2535 
2536 
2537 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2538  LOperand* context = UseFixed(instr->context(), cp);
2539  // Assign object to a fixed register different from those already used in
2540  // LForInPrepareMap.
2541  LOperand* object = UseFixed(instr->enumerable(), x0);
2542  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2543  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
2544 }
2545 
2546 
2547 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2548  LOperand* map = UseRegister(instr->map());
2549  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2550 }
2551 
2552 
2553 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2554  LOperand* value = UseRegisterAtStart(instr->value());
2555  LOperand* map = UseRegister(instr->map());
2556  LOperand* temp = TempRegister();
2557  return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
2558 }
2559 
2560 
2561 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2562  LOperand* object = UseRegisterAtStart(instr->object());
2563  LOperand* index = UseRegister(instr->index());
2564  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2565 }
2566 
2567 
2568 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
2569  LOperand* receiver = UseRegister(instr->receiver());
2570  LOperand* function = UseRegister(instr->function());
2571  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
2572  return AssignEnvironment(DefineAsRegister(result));
2573 }
2574 
2575 
2576 } } // namespace v8::internal
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
const int kMinInt
Definition: globals.h:249
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:156
void PrintDataTo(StringStream *stream) V8_OVERRIDE
const char * ToCString(const v8::String::Utf8Value &value)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
virtual void PrintDataTo(StringStream *stream) V8_OVERRIDE
Definition: lithium-arm.cc:124
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:99
const Register cp
const LowDwVfpRegister d0
const DwVfpRegister d24
int int32_t
Definition: unicode.cc:47
const int kMaxInt
Definition: globals.h:248
LEnvironment * environment() const
Definition: lithium-arm.h:246
#define ASSERT(condition)
Definition: checks.h:329
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:86
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:43
virtual LOperand * result() const =0
static const int kMaxFixedSlotIndex
Definition: lithium.h:195
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:52
DwVfpRegister DoubleRegister
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static const int kMinValue
Definition: objects.h:1679
static const char * String(Value tok)
Definition: token.h:294
bool HasEnvironment() const
Definition: lithium-arm.h:247
static int ToAllocationIndex(Register reg)
bool IsPowerOf2(T x)
Definition: utils.h:51
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:67
LPointerMap * pointer_map() const
Definition: lithium-arm.h:250
const char * ElementsKindToString(ElementsKind kind)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
static int ToAllocationIndex(DwVfpRegister reg)
bool IsDoubleOrFloatElementsKind(ElementsKind kind)
T Abs(T a)
Definition: utils.h:241
#define DEFINE_COMPILE(type)
bool HasPointerMap() const
Definition: lithium-arm.h:251
const LowDwVfpRegister d1
static Representation Tagged()
bool IsRedundant() const
Definition: lithium-arm.cc:113
static const int kMaxValue
Definition: objects.h:1681
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
static HValue * cast(HValue *value)
void PrintTo(StringStream *stream)
Definition: lithium.cc:55