v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
lithium-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "lithium-allocator-inl.h"
31 #include "mips/lithium-mips.h"
33 #include "hydrogen-osr.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 #define DEFINE_COMPILE(type) \
39  void L##type::CompileToNative(LCodeGen* generator) { \
40  generator->Do##type(this); \
41  }
42 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
43 #undef DEFINE_COMPILE
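// For illustration, applying DEFINE_COMPILE to a concrete instruction such as
// Goto expands to roughly the following, so each lithium instruction simply
// forwards code generation to the matching LCodeGen::Do<Type> method:
//
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }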
44 
45 #ifdef DEBUG
46 void LInstruction::VerifyCall() {
47  // Call instructions can use only fixed registers as temporaries and
48  // outputs because all registers are blocked by the calling convention.
49  // Input operands must use a fixed register or use-at-start policy or
50  // a non-register policy.
51  ASSERT(Output() == NULL ||
52  LUnallocated::cast(Output())->HasFixedPolicy() ||
53  !LUnallocated::cast(Output())->HasRegisterPolicy());
54  for (UseIterator it(this); !it.Done(); it.Advance()) {
55  LUnallocated* operand = LUnallocated::cast(it.Current());
56  ASSERT(operand->HasFixedPolicy() ||
57  operand->IsUsedAtStart());
58  }
59  for (TempIterator it(this); !it.Done(); it.Advance()) {
60  LUnallocated* operand = LUnallocated::cast(it.Current());
61  ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
62  }
63 }
64 #endif
65 
66 
67 void LInstruction::PrintTo(StringStream* stream) {
68  stream->Add("%s ", this->Mnemonic());
69 
70  PrintOutputOperandTo(stream);
71 
72  PrintDataTo(stream);
73 
74  if (HasEnvironment()) {
75  stream->Add(" ");
76  environment()->PrintTo(stream);
77  }
78 
79  if (HasPointerMap()) {
80  stream->Add(" ");
81  pointer_map()->PrintTo(stream);
82  }
83 }
84 
85 
86 void LInstruction::PrintDataTo(StringStream* stream) {
87  stream->Add("= ");
88  for (int i = 0; i < InputCount(); i++) {
89  if (i > 0) stream->Add(" ");
90  if (InputAt(i) == NULL) {
91  stream->Add("NULL");
92  } else {
93  InputAt(i)->PrintTo(stream);
94  }
95  }
96 }
97 
98 
99 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
100  if (HasResult()) result()->PrintTo(stream);
101 }
102 
103 
104 void LLabel::PrintDataTo(StringStream* stream) {
105  LGap::PrintDataTo(stream);
106  LLabel* rep = replacement();
107  if (rep != NULL) {
108  stream->Add(" Dead block replaced with B%d", rep->block_id());
109  }
110 }
111 
112 
113 bool LGap::IsRedundant() const {
114  for (int i = 0; i < 4; i++) {
115  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
116  return false;
117  }
118  }
119 
120  return true;
121 }
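// Note: the four parallel move slots checked above correspond to LGap's inner
// positions (BEFORE, START, END, AFTER); a gap is redundant only if every
// populated slot holds a redundant parallel move.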
122 
123 
124 void LGap::PrintDataTo(StringStream* stream) {
125  for (int i = 0; i < 4; i++) {
126  stream->Add("(");
127  if (parallel_moves_[i] != NULL) {
128  parallel_moves_[i]->PrintDataTo(stream);
129  }
130  stream->Add(") ");
131  }
132 }
133 
134 
135 const char* LArithmeticD::Mnemonic() const {
136  switch (op()) {
137  case Token::ADD: return "add-d";
138  case Token::SUB: return "sub-d";
139  case Token::MUL: return "mul-d";
140  case Token::DIV: return "div-d";
141  case Token::MOD: return "mod-d";
142  default:
143  UNREACHABLE();
144  return NULL;
145  }
146 }
147 
148 
149 const char* LArithmeticT::Mnemonic() const {
150  switch (op()) {
151  case Token::ADD: return "add-t";
152  case Token::SUB: return "sub-t";
153  case Token::MUL: return "mul-t";
154  case Token::MOD: return "mod-t";
155  case Token::DIV: return "div-t";
156  case Token::BIT_AND: return "bit-and-t";
157  case Token::BIT_OR: return "bit-or-t";
158  case Token::BIT_XOR: return "bit-xor-t";
159  case Token::ROR: return "ror-t";
160  case Token::SHL: return "sll-t";
161  case Token::SAR: return "sra-t";
162  case Token::SHR: return "srl-t";
163  default:
164  UNREACHABLE();
165  return NULL;
166  }
167 }
168 
169 
170 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
171  return !gen->IsNextEmittedBlock(block_id());
172 }
173 
174 
175 void LGoto::PrintDataTo(StringStream* stream) {
176  stream->Add("B%d", block_id());
177 }
178 
179 
180 void LBranch::PrintDataTo(StringStream* stream) {
181  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
182  value()->PrintTo(stream);
183 }
184 
185 
186 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
187  return new(zone()) LDebugBreak();
188 }
189 
190 
191 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
192  stream->Add("if ");
193  left()->PrintTo(stream);
194  stream->Add(" %s ", Token::String(op()));
195  right()->PrintTo(stream);
196  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
197 }
198 
199 
200 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
201  stream->Add("if is_object(");
202  value()->PrintTo(stream);
203  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
204 }
205 
206 
207 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
208  stream->Add("if is_string(");
209  value()->PrintTo(stream);
210  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
211 }
212 
213 
214 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
215  stream->Add("if is_smi(");
216  value()->PrintTo(stream);
217  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
218 }
219 
220 
221 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
222  stream->Add("if is_undetectable(");
223  value()->PrintTo(stream);
224  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
225 }
226 
227 
228 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
229  stream->Add("if string_compare(");
230  left()->PrintTo(stream);
231  right()->PrintTo(stream);
232  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
233 }
234 
235 
236 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
237  stream->Add("if has_instance_type(");
238  value()->PrintTo(stream);
239  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
240 }
241 
242 
243 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
244  stream->Add("if has_cached_array_index(");
245  value()->PrintTo(stream);
246  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
247 }
248 
249 
250 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
251  stream->Add("if class_of_test(");
252  value()->PrintTo(stream);
253  stream->Add(", \"%o\") then B%d else B%d",
254  *hydrogen()->class_name(),
255  true_block_id(),
256  false_block_id());
257 }
258 
259 
260 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
261  stream->Add("if typeof ");
262  value()->PrintTo(stream);
263  stream->Add(" == \"%s\" then B%d else B%d",
264  hydrogen()->type_literal()->ToCString().get(),
265  true_block_id(), false_block_id());
266 }
267 
268 
269 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
270  stream->Add(" = ");
271  function()->PrintTo(stream);
272  stream->Add(".code_entry = ");
273  code_object()->PrintTo(stream);
274 }
275 
276 
277 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
278  stream->Add(" = ");
279  base_object()->PrintTo(stream);
280  stream->Add(" + ");
281  offset()->PrintTo(stream);
282 }
283 
284 
285 void LCallJSFunction::PrintDataTo(StringStream* stream) {
286  stream->Add("= ");
287  function()->PrintTo(stream);
288  stream->Add("#%d / ", arity());
289 }
290 
291 
292 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
293  for (int i = 0; i < InputCount(); i++) {
294  InputAt(i)->PrintTo(stream);
295  stream->Add(" ");
296  }
297  stream->Add("#%d / ", arity());
298 }
299 
300 
301 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
302  context()->PrintTo(stream);
303  stream->Add("[%d]", slot_index());
304 }
305 
306 
307 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
308  context()->PrintTo(stream);
309  stream->Add("[%d] <- ", slot_index());
310  value()->PrintTo(stream);
311 }
312 
313 
314 void LInvokeFunction::PrintDataTo(StringStream* stream) {
315  stream->Add("= ");
316  function()->PrintTo(stream);
317  stream->Add(" #%d / ", arity());
318 }
319 
320 
321 void LCallNew::PrintDataTo(StringStream* stream) {
322  stream->Add("= ");
323  constructor()->PrintTo(stream);
324  stream->Add(" #%d / ", arity());
325 }
326 
327 
328 void LCallNewArray::PrintDataTo(StringStream* stream) {
329  stream->Add("= ");
330  constructor()->PrintTo(stream);
331  stream->Add(" #%d / ", arity());
332  ElementsKind kind = hydrogen()->elements_kind();
333  stream->Add(" (%s) ", ElementsKindToString(kind));
334 }
335 
336 
337 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
338  arguments()->PrintTo(stream);
339  stream->Add(" length ");
340  length()->PrintTo(stream);
341  stream->Add(" index ");
342  index()->PrintTo(stream);
343 }
344 
345 
346 void LStoreNamedField::PrintDataTo(StringStream* stream) {
347  object()->PrintTo(stream);
348  hydrogen()->access().PrintTo(stream);
349  stream->Add(" <- ");
350  value()->PrintTo(stream);
351 }
352 
353 
354 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
355  object()->PrintTo(stream);
356  stream->Add(".");
357  stream->Add(String::cast(*name())->ToCString().get());
358  stream->Add(" <- ");
359  value()->PrintTo(stream);
360 }
361 
362 
363 void LLoadKeyed::PrintDataTo(StringStream* stream) {
364  elements()->PrintTo(stream);
365  stream->Add("[");
366  key()->PrintTo(stream);
367  if (hydrogen()->IsDehoisted()) {
368  stream->Add(" + %d]", additional_index());
369  } else {
370  stream->Add("]");
371  }
372 }
373 
374 
375 void LStoreKeyed::PrintDataTo(StringStream* stream) {
376  elements()->PrintTo(stream);
377  stream->Add("[");
378  key()->PrintTo(stream);
379  if (hydrogen()->IsDehoisted()) {
380  stream->Add(" + %d] <-", additional_index());
381  } else {
382  stream->Add("] <- ");
383  }
384 
385  if (value() == NULL) {
386  ASSERT(hydrogen()->IsConstantHoleStore() &&
387  hydrogen()->value()->representation().IsDouble());
388  stream->Add("<the hole(nan)>");
389  } else {
390  value()->PrintTo(stream);
391  }
392 }
393 
394 
395 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
396  object()->PrintTo(stream);
397  stream->Add("[");
398  key()->PrintTo(stream);
399  stream->Add("] <- ");
400  value()->PrintTo(stream);
401 }
402 
403 
404 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
405  object()->PrintTo(stream);
406  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
407 }
408 
409 
410 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
411  // Skip a slot for a double-width slot, so the value gets two consecutive slots.
412  if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
413  return spill_slot_count_++;
414 }
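// A quick trace of GetNextSpillIndex, assuming spill_slot_count_ starts at 0:
//   GetNextSpillIndex(DOUBLE_REGISTERS)   // bumps the count to 1, returns 1;
//                                         // the count ends at 2, so the double
//                                         // value owns two consecutive slots.
//   GetNextSpillIndex(GENERAL_REGISTERS)  // returns 2.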
415 
416 
417 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
418  int index = GetNextSpillIndex(kind);
419  if (kind == DOUBLE_REGISTERS) {
420  return LDoubleStackSlot::Create(index, zone());
421  } else {
422  ASSERT(kind == GENERAL_REGISTERS);
423  return LStackSlot::Create(index, zone());
424  }
425 }
426 
427 
428 LPlatformChunk* LChunkBuilder::Build() {
429  ASSERT(is_unused());
430  chunk_ = new(zone()) LPlatformChunk(info(), graph());
431  LPhase phase("L_Building chunk", chunk_);
432  status_ = BUILDING;
433 
434  // If compiling for OSR, reserve space for the unoptimized frame,
435  // which will be subsumed into this frame.
436  if (graph()->has_osr()) {
437  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
438  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
439  }
440  }
441 
442  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
443  for (int i = 0; i < blocks->length(); i++) {
444  HBasicBlock* next = NULL;
445  if (i < blocks->length() - 1) next = blocks->at(i + 1);
446  DoBasicBlock(blocks->at(i), next);
447  if (is_aborted()) return NULL;
448  }
449  status_ = DONE;
450  return chunk_;
451 }
452 
453 
454 void LCodeGen::Abort(BailoutReason reason) {
455  info()->set_bailout_reason(reason);
456  status_ = ABORTED;
457 }
458 
459 
460 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
461  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
462  Register::ToAllocationIndex(reg));
463 }
464 
465 
466 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
467  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
468  DoubleRegister::ToAllocationIndex(reg));
469 }
470 
471 
472 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
473  return Use(value, ToUnallocated(fixed_register));
474 }
475 
476 
477 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
478  return Use(value, ToUnallocated(reg));
479 }
480 
481 
482 LOperand* LChunkBuilder::UseRegister(HValue* value) {
483  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
484 }
485 
486 
487 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
488  return Use(value,
489  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
490  LUnallocated::USED_AT_START));
491 }
492 
493 
494 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
495  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
496 }
497 
498 
499 LOperand* LChunkBuilder::Use(HValue* value) {
500  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
501 }
502 
503 
504 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
505  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
506  LUnallocated::USED_AT_START));
507 }
508 
509 
510 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
511  return value->IsConstant()
512  ? chunk_->DefineConstantOperand(HConstant::cast(value))
513  : Use(value);
514 }
515 
516 
517 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
518  return value->IsConstant()
519  ? chunk_->DefineConstantOperand(HConstant::cast(value))
520  : UseAtStart(value);
521 }
522 
523 
524 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
525  return value->IsConstant()
526  ? chunk_->DefineConstantOperand(HConstant::cast(value))
527  : UseRegister(value);
528 }
529 
530 
531 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
532  return value->IsConstant()
533  ? chunk_->DefineConstantOperand(HConstant::cast(value))
534  : UseRegisterAtStart(value);
535 }
536 
537 
538 LOperand* LChunkBuilder::UseConstant(HValue* value) {
539  return chunk_->DefineConstantOperand(HConstant::cast(value));
540 }
541 
542 
543 LOperand* LChunkBuilder::UseAny(HValue* value) {
544  return value->IsConstant()
545  ? chunk_->DefineConstantOperand(HConstant::cast(value))
546  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
547 }
548 
549 
550 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
551  if (value->EmitAtUses()) {
552  HInstruction* instr = HInstruction::cast(value);
553  VisitInstruction(instr);
554  }
555  operand->set_virtual_register(value->id());
556  return operand;
557 }
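// Summary of the Use* helpers above, in terms of the LUnallocated policy they
// hand to the register allocator:
//   Use(v)                - NONE (no register constraint)
//   UseAtStart(v)         - NONE, marked used-at-start
//   UseRegister(v)        - MUST_HAVE_REGISTER
//   UseRegisterAtStart(v) - MUST_HAVE_REGISTER, used at start
//   UseTempRegister(v)    - WRITABLE_REGISTER
//   UseFixed(v, reg)      - FIXED_REGISTER / FIXED_DOUBLE_REGISTER
//   UseAny(v)             - ANY, or a constant operand when v is a constant
// The *OrConstant variants substitute a constant operand when the value is a
// compile-time constant.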
558 
559 
560 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
561  LUnallocated* result) {
562  result->set_virtual_register(current_instruction_->id());
563  instr->set_result(result);
564  return instr;
565 }
566 
567 
568 LInstruction* LChunkBuilder::DefineAsRegister(
569  LTemplateResultInstruction<1>* instr) {
570  return Define(instr,
571  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
572 }
573 
574 
575 LInstruction* LChunkBuilder::DefineAsSpilled(
576  LTemplateResultInstruction<1>* instr, int index) {
577  return Define(instr,
578  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
579 }
580 
581 
582 LInstruction* LChunkBuilder::DefineSameAsFirst(
583  LTemplateResultInstruction<1>* instr) {
584  return Define(instr,
585  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
586 }
587 
588 
589 LInstruction* LChunkBuilder::DefineFixed(
590  LTemplateResultInstruction<1>* instr, Register reg) {
591  return Define(instr, ToUnallocated(reg));
592 }
593 
594 
595 LInstruction* LChunkBuilder::DefineFixedDouble(
596  LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
597  return Define(instr, ToUnallocated(reg));
598 }
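// The Define* helpers play the same role for results:
//   DefineAsRegister    - result in some allocatable register
//   DefineAsSpilled     - result pinned to a specific spill slot (FIXED_SLOT)
//   DefineSameAsFirst   - result reuses the first input's register
//   DefineFixed(Double) - result pinned to a specific (double) register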
599 
600 
601 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
602  HEnvironment* hydrogen_env = current_block_->last_environment();
603  int argument_index_accumulator = 0;
604  ZoneList<HValue*> objects_to_materialize(0, zone());
605  instr->set_environment(CreateEnvironment(hydrogen_env,
606  &argument_index_accumulator,
607  &objects_to_materialize));
608  return instr;
609 }
610 
611 
612 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
613  HInstruction* hinstr,
614  CanDeoptimize can_deoptimize) {
615  info()->MarkAsNonDeferredCalling();
616 #ifdef DEBUG
617  instr->VerifyCall();
618 #endif
619  instr->MarkAsCall();
620  instr = AssignPointerMap(instr);
621 
622  // If the instruction does not have side effects, lazy deoptimization
623  // after the call will try to deoptimize to the point before the call.
624  // Thus we still need to attach an environment to this call even if
625  // the call sequence cannot deoptimize eagerly.
626  bool needs_environment =
627  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
628  !hinstr->HasObservableSideEffects();
629  if (needs_environment && !instr->HasEnvironment()) {
630  instr = AssignEnvironment(instr);
631  }
632 
633  return instr;
634 }
635 
636 
637 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
638  ASSERT(!instr->HasPointerMap());
639  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
640  return instr;
641 }
642 
643 
644 LUnallocated* LChunkBuilder::TempRegister() {
645  LUnallocated* operand =
646  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
647  int vreg = allocator_->GetVirtualRegister();
648  if (!allocator_->AllocationOk()) {
649  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
650  vreg = 0;
651  }
652  operand->set_virtual_register(vreg);
653  return operand;
654 }
655 
656 
657 LOperand* LChunkBuilder::FixedTemp(Register reg) {
658  LUnallocated* operand = ToUnallocated(reg);
659  ASSERT(operand->HasFixedPolicy());
660  return operand;
661 }
662 
663 
664 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
665  LUnallocated* operand = ToUnallocated(reg);
666  ASSERT(operand->HasFixedPolicy());
667  return operand;
668 }
669 
670 
671 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
672  return new(zone()) LLabel(instr->block());
673 }
674 
675 
676 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
677  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
678 }
679 
680 
681 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
682  UNREACHABLE();
683  return NULL;
684 }
685 
686 
687 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
688  return AssignEnvironment(new(zone()) LDeoptimize);
689 }
690 
691 
692 LInstruction* LChunkBuilder::DoShift(Token::Value op,
693  HBitwiseBinaryOperation* instr) {
694  if (instr->representation().IsSmiOrInteger32()) {
695  ASSERT(instr->left()->representation().Equals(instr->representation()));
696  ASSERT(instr->right()->representation().Equals(instr->representation()));
697  LOperand* left = UseRegisterAtStart(instr->left());
698 
699  HValue* right_value = instr->right();
700  LOperand* right = NULL;
701  int constant_value = 0;
702  bool does_deopt = false;
703  if (right_value->IsConstant()) {
704  HConstant* constant = HConstant::cast(right_value);
705  right = chunk_->DefineConstantOperand(constant);
706  constant_value = constant->Integer32Value() & 0x1f;
707  // Left shifts can deoptimize if we shift by > 0 and the result cannot be
708  // truncated to smi.
709  if (instr->representation().IsSmi() && constant_value > 0) {
710  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
711  }
712  } else {
713  right = UseRegisterAtStart(right_value);
714  }
715 
716  // Shift operations can only deoptimize if we do a logical shift
717  // by 0 and the result cannot be truncated to int32.
718  if (op == Token::SHR && constant_value == 0) {
719  if (FLAG_opt_safe_uint32_operations) {
720  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
721  } else {
722  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
723  }
724  }
725 
726  LInstruction* result =
727  DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
728  return does_deopt ? AssignEnvironment(result) : result;
729  } else {
730  return DoArithmeticT(op, instr);
731  }
732 }
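// Note: the "& 0x1f" masking of constant shift amounts above matches both the
// JavaScript semantics for 32-bit shifts (only the low five bits of the count
// are used) and the 5-bit shift-amount field of the MIPS shift instructions.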
733 
734 
735 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
736  HArithmeticBinaryOperation* instr) {
737  ASSERT(instr->representation().IsDouble());
738  ASSERT(instr->left()->representation().IsDouble());
739  ASSERT(instr->right()->representation().IsDouble());
740  if (op == Token::MOD) {
741  LOperand* left = UseFixedDouble(instr->left(), f2);
742  LOperand* right = UseFixedDouble(instr->right(), f4);
743  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
744  // We call a C function for double modulo. It can't trigger a GC. We need
745  // to use a fixed result register for the call.
746  // TODO(fschneider): Allow any register as input registers.
747  return MarkAsCall(DefineFixedDouble(result, f2), instr);
748  } else {
749  LOperand* left = UseRegisterAtStart(instr->left());
750  LOperand* right = UseRegisterAtStart(instr->right());
751  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
752  return DefineAsRegister(result);
753  }
754 }
755 
756 
757 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
758  HBinaryOperation* instr) {
759  HValue* left = instr->left();
760  HValue* right = instr->right();
761  ASSERT(left->representation().IsTagged());
762  ASSERT(right->representation().IsTagged());
763  LOperand* context = UseFixed(instr->context(), cp);
764  LOperand* left_operand = UseFixed(left, a1);
765  LOperand* right_operand = UseFixed(right, a0);
766  LArithmeticT* result =
767  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
768  return MarkAsCall(DefineFixed(result, v0), instr);
769 }
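// As elsewhere in this file, the tagged (generic) binary operations are
// compiled as calls: the operands are pinned to a1/a0, the context to cp, and
// the result comes back in v0.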
770 
771 
772 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
773  ASSERT(is_building());
774  current_block_ = block;
775  next_block_ = next_block;
776  if (block->IsStartBlock()) {
777  block->UpdateEnvironment(graph_->start_environment());
778  argument_count_ = 0;
779  } else if (block->predecessors()->length() == 1) {
780  // We have a single predecessor => copy environment and outgoing
781  // argument count from the predecessor.
782  ASSERT(block->phis()->length() == 0);
783  HBasicBlock* pred = block->predecessors()->at(0);
784  HEnvironment* last_environment = pred->last_environment();
785  ASSERT(last_environment != NULL);
786  // Only copy the environment if it is used again later.
787  if (pred->end()->SecondSuccessor() == NULL) {
788  ASSERT(pred->end()->FirstSuccessor() == block);
789  } else {
790  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
791  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
792  last_environment = last_environment->Copy();
793  }
794  }
795  block->UpdateEnvironment(last_environment);
796  ASSERT(pred->argument_count() >= 0);
797  argument_count_ = pred->argument_count();
798  } else {
799  // We are at a state join => process phis.
800  HBasicBlock* pred = block->predecessors()->at(0);
801  // No need to copy the environment, it cannot be used later.
802  HEnvironment* last_environment = pred->last_environment();
803  for (int i = 0; i < block->phis()->length(); ++i) {
804  HPhi* phi = block->phis()->at(i);
805  if (phi->HasMergedIndex()) {
806  last_environment->SetValueAt(phi->merged_index(), phi);
807  }
808  }
809  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
810  if (block->deleted_phis()->at(i) < last_environment->length()) {
811  last_environment->SetValueAt(block->deleted_phis()->at(i),
812  graph_->GetConstantUndefined());
813  }
814  }
815  block->UpdateEnvironment(last_environment);
816  // Pick up the outgoing argument count of one of the predecessors.
817  argument_count_ = pred->argument_count();
818  }
819  HInstruction* current = block->first();
820  int start = chunk_->instructions()->length();
821  while (current != NULL && !is_aborted()) {
822  // Code for constants in registers is generated lazily.
823  if (!current->EmitAtUses()) {
824  VisitInstruction(current);
825  }
826  current = current->next();
827  }
828  int end = chunk_->instructions()->length() - 1;
829  if (end >= start) {
830  block->set_first_instruction_index(start);
831  block->set_last_instruction_index(end);
832  }
833  block->set_argument_count(argument_count_);
834  next_block_ = NULL;
835  current_block_ = NULL;
836 }
837 
838 
839 void LChunkBuilder::VisitInstruction(HInstruction* current) {
840  HInstruction* old_current = current_instruction_;
841  current_instruction_ = current;
842 
843  LInstruction* instr = NULL;
844  if (current->CanReplaceWithDummyUses()) {
845  if (current->OperandCount() == 0) {
846  instr = DefineAsRegister(new(zone()) LDummy());
847  } else {
848  ASSERT(!current->OperandAt(0)->IsControlInstruction());
849  instr = DefineAsRegister(new(zone())
850  LDummyUse(UseAny(current->OperandAt(0))));
851  }
852  for (int i = 1; i < current->OperandCount(); ++i) {
853  if (current->OperandAt(i)->IsControlInstruction()) continue;
854  LInstruction* dummy =
855  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
856  dummy->set_hydrogen_value(current);
857  chunk_->AddInstruction(dummy, current_block_);
858  }
859  } else {
860  instr = current->CompileToLithium(this);
861  }
862 
863  argument_count_ += current->argument_delta();
864  ASSERT(argument_count_ >= 0);
865 
866  if (instr != NULL) {
867  // Associate the hydrogen instruction first, since we may need it for
868  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
869  instr->set_hydrogen_value(current);
870 
871 #if DEBUG
872  // Make sure that the lithium instruction has either no fixed register
873  // constraints in temps or the result OR no uses that are only used at
874  // start. If this invariant doesn't hold, the register allocator can decide
875  // to insert a split of a range immediately before the instruction due to an
876  // already allocated register needing to be used for the instruction's fixed
877  // register constraint. In this case, the register allocator won't see an
878  // interference between the split child and the use-at-start (it would if
879  // it was just a plain use), so it is free to move the split child into
880  // the same register that is used for the use-at-start.
881  // See https://code.google.com/p/chromium/issues/detail?id=201590
882  if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
883  int fixed = 0;
884  int used_at_start = 0;
885  for (UseIterator it(instr); !it.Done(); it.Advance()) {
886  LUnallocated* operand = LUnallocated::cast(it.Current());
887  if (operand->IsUsedAtStart()) ++used_at_start;
888  }
889  if (instr->Output() != NULL) {
890  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
891  }
892  for (TempIterator it(instr); !it.Done(); it.Advance()) {
893  LUnallocated* operand = LUnallocated::cast(it.Current());
894  if (operand->HasFixedPolicy()) ++fixed;
895  }
896  ASSERT(fixed == 0 || used_at_start == 0);
897  }
898 #endif
899 
900  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
901  instr = AssignPointerMap(instr);
902  }
903  if (FLAG_stress_environments && !instr->HasEnvironment()) {
904  instr = AssignEnvironment(instr);
905  }
906  chunk_->AddInstruction(instr, current_block_);
907 
908  if (instr->IsCall()) {
909  HValue* hydrogen_value_for_lazy_bailout = current;
910  LInstruction* instruction_needing_environment = NULL;
911  if (current->HasObservableSideEffects()) {
912  HSimulate* sim = HSimulate::cast(current->next());
913  instruction_needing_environment = instr;
914  sim->ReplayEnvironment(current_block_->last_environment());
915  hydrogen_value_for_lazy_bailout = sim;
916  }
917  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
918  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
919  chunk_->AddInstruction(bailout, current_block_);
920  if (instruction_needing_environment != NULL) {
921  // Store the lazy deopt environment with the instruction if needed.
922  // Right now it is only used for LInstanceOfKnownGlobal.
923  instruction_needing_environment->
924  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
925  }
926  }
927  }
928  current_instruction_ = old_current;
929 }
930 
931 
932 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
933  return new(zone()) LGoto(instr->FirstSuccessor());
934 }
935 
936 
937 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
938  LInstruction* goto_instr = CheckElideControlInstruction(instr);
939  if (goto_instr != NULL) return goto_instr;
940 
941  HValue* value = instr->value();
942  LBranch* result = new(zone()) LBranch(UseRegister(value));
943  // Tagged values that are not known smis or booleans require a
944  // deoptimization environment. If the instruction is generic, no
945  // environment is needed since all cases are handled.
946  Representation rep = value->representation();
947  HType type = value->type();
948  ToBooleanStub::Types expected = instr->expected_input_types();
949  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() &&
950  !expected.IsGeneric()) {
951  return AssignEnvironment(result);
952  }
953  return result;
954 }
955 
956 
957 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
958  LInstruction* goto_instr = CheckElideControlInstruction(instr);
959  if (goto_instr != NULL) return goto_instr;
960 
961  ASSERT(instr->value()->representation().IsTagged());
962  LOperand* value = UseRegisterAtStart(instr->value());
963  LOperand* temp = TempRegister();
964  return new(zone()) LCmpMapAndBranch(value, temp);
965 }
966 
967 
968 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
969  info()->MarkAsRequiresFrame();
970  return DefineAsRegister(
971  new(zone()) LArgumentsLength(UseRegister(length->value())));
972 }
973 
974 
975 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
976  info()->MarkAsRequiresFrame();
977  return DefineAsRegister(new(zone()) LArgumentsElements);
978 }
979 
980 
981 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
982  LOperand* context = UseFixed(instr->context(), cp);
983  LInstanceOf* result =
984  new(zone()) LInstanceOf(context, UseFixed(instr->left(), a0),
985  UseFixed(instr->right(), a1));
986  return MarkAsCall(DefineFixed(result, v0), instr);
987 }
988 
989 
990 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
991  HInstanceOfKnownGlobal* instr) {
992  LInstanceOfKnownGlobal* result =
993  new(zone()) LInstanceOfKnownGlobal(
994  UseFixed(instr->context(), cp),
995  UseFixed(instr->left(), a0),
996  FixedTemp(t0));
997  return MarkAsCall(DefineFixed(result, v0), instr);
998 }
999 
1000 
1001 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1002  LOperand* receiver = UseRegisterAtStart(instr->receiver());
1003  LOperand* function = UseRegisterAtStart(instr->function());
1004  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1005  return AssignEnvironment(DefineAsRegister(result));
1006 }
1007 
1008 
1009 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1010  LOperand* function = UseFixed(instr->function(), a1);
1011  LOperand* receiver = UseFixed(instr->receiver(), a0);
1012  LOperand* length = UseFixed(instr->length(), a2);
1013  LOperand* elements = UseFixed(instr->elements(), a3);
1014  LApplyArguments* result = new(zone()) LApplyArguments(function,
1015  receiver,
1016  length,
1017  elements);
1018  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
1019 }
1020 
1021 
1022 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1023  LOperand* argument = Use(instr->argument());
1024  return new(zone()) LPushArgument(argument);
1025 }
1026 
1027 
1028 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1029  HStoreCodeEntry* store_code_entry) {
1030  LOperand* function = UseRegister(store_code_entry->function());
1031  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1032  return new(zone()) LStoreCodeEntry(function, code_object);
1033 }
1034 
1035 
1036 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1037  HInnerAllocatedObject* instr) {
1038  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1039  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1040  return DefineAsRegister(
1041  new(zone()) LInnerAllocatedObject(base_object, offset));
1042 }
1043 
1044 
1045 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1046  return instr->HasNoUses()
1047  ? NULL
1048  : DefineAsRegister(new(zone()) LThisFunction);
1049 }
1050 
1051 
1052 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1053  if (instr->HasNoUses()) return NULL;
1054 
1055  if (info()->IsStub()) {
1056  return DefineFixed(new(zone()) LContext, cp);
1057  }
1058 
1059  return DefineAsRegister(new(zone()) LContext);
1060 }
1061 
1062 
1063 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1064  LOperand* context = UseFixed(instr->context(), cp);
1065  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1066 }
1067 
1068 
1069 LInstruction* LChunkBuilder::DoCallJSFunction(
1070  HCallJSFunction* instr) {
1071  LOperand* function = UseFixed(instr->function(), a1);
1072 
1073  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1074 
1075  return MarkAsCall(DefineFixed(result, v0), instr);
1076 }
1077 
1078 
1079 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1080  HCallWithDescriptor* instr) {
1081  const CallInterfaceDescriptor* descriptor = instr->descriptor();
1082 
1083  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1084  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1085  ops.Add(target, zone());
1086  for (int i = 1; i < instr->OperandCount(); i++) {
1087  LOperand* op = UseFixed(instr->OperandAt(i),
1088  descriptor->GetParameterRegister(i - 1));
1089  ops.Add(op, zone());
1090  }
1091 
1092  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1093  descriptor, ops, zone());
1094  return MarkAsCall(DefineFixed(result, v0), instr);
1095 }
1096 
1097 
1098 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1099  LOperand* context = UseFixed(instr->context(), cp);
1100  LOperand* function = UseFixed(instr->function(), a1);
1101  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1102  return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1103 }
1104 
1105 
1106 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1107  switch (instr->op()) {
1108  case kMathFloor: return DoMathFloor(instr);
1109  case kMathRound: return DoMathRound(instr);
1110  case kMathAbs: return DoMathAbs(instr);
1111  case kMathLog: return DoMathLog(instr);
1112  case kMathExp: return DoMathExp(instr);
1113  case kMathSqrt: return DoMathSqrt(instr);
1114  case kMathPowHalf: return DoMathPowHalf(instr);
1115  case kMathClz32: return DoMathClz32(instr);
1116  default:
1117  UNREACHABLE();
1118  return NULL;
1119  }
1120 }
1121 
1122 
1123 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1124  ASSERT(instr->representation().IsDouble());
1125  ASSERT(instr->value()->representation().IsDouble());
1126  LOperand* input = UseFixedDouble(instr->value(), f4);
1127  return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), f4), instr);
1128 }
1129 
1130 
1131 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1132  LOperand* input = UseRegisterAtStart(instr->value());
1133  LMathClz32* result = new(zone()) LMathClz32(input);
1134  return DefineAsRegister(result);
1135 }
1136 
1137 
1138 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1139  ASSERT(instr->representation().IsDouble());
1140  ASSERT(instr->value()->representation().IsDouble());
1141  LOperand* input = UseRegister(instr->value());
1142  LOperand* temp1 = TempRegister();
1143  LOperand* temp2 = TempRegister();
1144  LOperand* double_temp = FixedTemp(f6); // Chosen by fair dice roll.
1145  LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
1146  return DefineAsRegister(result);
1147 }
1148 
1149 
1150 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1151  // Input cannot be the same as the result, see LCodeGen::DoMathPowHalf.
1152  LOperand* input = UseFixedDouble(instr->value(), f8);
1153  LOperand* temp = FixedTemp(f6);
1154  LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
1155  return DefineFixedDouble(result, f4);
1156 }
1157 
1158 
1159 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1160  Representation r = instr->value()->representation();
1161  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
1162  ? NULL
1163  : UseFixed(instr->context(), cp);
1164  LOperand* input = UseRegister(instr->value());
1165  LMathAbs* result = new(zone()) LMathAbs(context, input);
1166  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1167 }
1168 
1169 
1170 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1171  LOperand* input = UseRegister(instr->value());
1172  LOperand* temp = TempRegister();
1173  LMathFloor* result = new(zone()) LMathFloor(input, temp);
1174  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1175 }
1176 
1177 
1178 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1179  LOperand* input = UseRegister(instr->value());
1180  LMathSqrt* result = new(zone()) LMathSqrt(input);
1181  return DefineAsRegister(result);
1182 }
1183 
1184 
1185 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1186  LOperand* input = UseRegister(instr->value());
1187  LOperand* temp = FixedTemp(f6);
1188  LMathRound* result = new(zone()) LMathRound(input, temp);
1189  return AssignEnvironment(DefineAsRegister(result));
1190 }
1191 
1192 
1193 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1194  LOperand* context = UseFixed(instr->context(), cp);
1195  LOperand* constructor = UseFixed(instr->constructor(), a1);
1196  LCallNew* result = new(zone()) LCallNew(context, constructor);
1197  return MarkAsCall(DefineFixed(result, v0), instr);
1198 }
1199 
1200 
1201 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1202  LOperand* context = UseFixed(instr->context(), cp);
1203  LOperand* constructor = UseFixed(instr->constructor(), a1);
1204  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1205  return MarkAsCall(DefineFixed(result, v0), instr);
1206 }
1207 
1208 
1209 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1210  LOperand* context = UseFixed(instr->context(), cp);
1211  LOperand* function = UseFixed(instr->function(), a1);
1212  LCallFunction* call = new(zone()) LCallFunction(context, function);
1213  return MarkAsCall(DefineFixed(call, v0), instr);
1214 }
1215 
1216 
1217 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1218  LOperand* context = UseFixed(instr->context(), cp);
1219  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), v0), instr);
1220 }
1221 
1222 
1223 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1224  return DoShift(Token::ROR, instr);
1225 }
1226 
1227 
1228 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1229  return DoShift(Token::SHR, instr);
1230 }
1231 
1232 
1233 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1234  return DoShift(Token::SAR, instr);
1235 }
1236 
1237 
1238 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1239  return DoShift(Token::SHL, instr);
1240 }
1241 
1242 
1243 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1244  if (instr->representation().IsSmiOrInteger32()) {
1245  ASSERT(instr->left()->representation().Equals(instr->representation()));
1246  ASSERT(instr->right()->representation().Equals(instr->representation()));
1247  ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1248 
1249  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1250  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1251  return DefineAsRegister(new(zone()) LBitI(left, right));
1252  } else {
1253  return DoArithmeticT(instr->op(), instr);
1254  }
1255 }
1256 
1257 
1258 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1259  ASSERT(instr->representation().IsSmiOrInteger32());
1260  ASSERT(instr->left()->representation().Equals(instr->representation()));
1261  ASSERT(instr->right()->representation().Equals(instr->representation()));
1262  LOperand* dividend = UseRegister(instr->left());
1263  int32_t divisor = instr->right()->GetInteger32Constant();
1264  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1265  dividend, divisor));
1266  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1267  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1268  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1269  divisor != 1 && divisor != -1)) {
1270  result = AssignEnvironment(result);
1271  }
1272  return result;
1273 }
1274 
1275 
1276 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1277  ASSERT(instr->representation().IsInteger32());
1278  ASSERT(instr->left()->representation().Equals(instr->representation()));
1279  ASSERT(instr->right()->representation().Equals(instr->representation()));
1280  LOperand* dividend = UseRegister(instr->left());
1281  int32_t divisor = instr->right()->GetInteger32Constant();
1282  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1283  dividend, divisor));
1284  if (divisor == 0 ||
1285  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1286  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1287  result = AssignEnvironment(result);
1288  }
1289  return result;
1290 }
1291 
1292 
1293 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1294  ASSERT(instr->representation().IsSmiOrInteger32());
1295  ASSERT(instr->left()->representation().Equals(instr->representation()));
1296  ASSERT(instr->right()->representation().Equals(instr->representation()));
1297  LOperand* dividend = UseRegister(instr->left());
1298  LOperand* divisor = UseRegister(instr->right());
1299  LDivI* div = new(zone()) LDivI(dividend, divisor);
1300  return AssignEnvironment(DefineAsRegister(div));
1301 }
1302 
1303 
1304 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1305  if (instr->representation().IsSmiOrInteger32()) {
1306  if (instr->RightIsPowerOf2()) {
1307  return DoDivByPowerOf2I(instr);
1308  } else if (instr->right()->IsConstant()) {
1309  return DoDivByConstI(instr);
1310  } else {
1311  return DoDivI(instr);
1312  }
1313  } else if (instr->representation().IsDouble()) {
1314  return DoArithmeticD(Token::DIV, instr);
1315  } else {
1316  return DoArithmeticT(Token::DIV, instr);
1317  }
1318 }
1319 
1320 
1321 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1322  LOperand* dividend = UseRegisterAtStart(instr->left());
1323  int32_t divisor = instr->right()->GetInteger32Constant();
1324  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1325  dividend, divisor));
1326  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1327  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1328  result = AssignEnvironment(result);
1329  }
1330  return result;
1331 }
1332 
1333 
1334 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1335  ASSERT(instr->representation().IsInteger32());
1336  ASSERT(instr->left()->representation().Equals(instr->representation()));
1337  ASSERT(instr->right()->representation().Equals(instr->representation()));
1338  LOperand* dividend = UseRegister(instr->left());
1339  int32_t divisor = instr->right()->GetInteger32Constant();
1340  LOperand* temp =
1341  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1342  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1343  NULL : TempRegister();
1344  LInstruction* result = DefineAsRegister(
1345  new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1346  if (divisor == 0 ||
1347  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1348  result = AssignEnvironment(result);
1349  }
1350  return result;
1351 }
1352 
1353 
1354 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1355  if (instr->RightIsPowerOf2()) {
1356  return DoFlooringDivByPowerOf2I(instr);
1357  } else if (instr->right()->IsConstant()) {
1358  return DoFlooringDivByConstI(instr);
1359  } else {
1360  return DoDivI(instr);
1361  }
1362 }
1363 
1364 
1365 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1366  ASSERT(instr->representation().IsSmiOrInteger32());
1367  ASSERT(instr->left()->representation().Equals(instr->representation()));
1368  ASSERT(instr->right()->representation().Equals(instr->representation()));
1369  LOperand* dividend = UseRegisterAtStart(instr->left());
1370  int32_t divisor = instr->right()->GetInteger32Constant();
1371  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1372  dividend, divisor));
1373  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1374  result = AssignEnvironment(result);
1375  }
1376  return result;
1377 }
1378 
1379 
1380 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1381  ASSERT(instr->representation().IsSmiOrInteger32());
1382  ASSERT(instr->left()->representation().Equals(instr->representation()));
1383  ASSERT(instr->right()->representation().Equals(instr->representation()));
1384  LOperand* dividend = UseRegister(instr->left());
1385  int32_t divisor = instr->right()->GetInteger32Constant();
1386  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1387  dividend, divisor));
1388  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1389  result = AssignEnvironment(result);
1390  }
1391  return result;
1392 }
1393 
1394 
1395 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1396  ASSERT(instr->representation().IsSmiOrInteger32());
1397  ASSERT(instr->left()->representation().Equals(instr->representation()));
1398  ASSERT(instr->right()->representation().Equals(instr->representation()));
1399  LOperand* dividend = UseRegister(instr->left());
1400  LOperand* divisor = UseRegister(instr->right());
1401  LInstruction* result = DefineAsRegister(new(zone()) LModI(
1402  dividend, divisor));
1403  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1404  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1405  result = AssignEnvironment(result);
1406  }
1407  return result;
1408 }
1409 
1410 
1411 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1412  if (instr->representation().IsSmiOrInteger32()) {
1413  return instr->RightIsPowerOf2() ? DoModByPowerOf2I(instr) : DoModI(instr);
1414  } else if (instr->representation().IsDouble()) {
1415  return DoArithmeticD(Token::MOD, instr);
1416  } else {
1417  return DoArithmeticT(Token::MOD, instr);
1418  }
1419 }
1420 
1421 
1422 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1423  if (instr->representation().IsSmiOrInteger32()) {
1424  ASSERT(instr->left()->representation().Equals(instr->representation()));
1425  ASSERT(instr->right()->representation().Equals(instr->representation()));
1426  HValue* left = instr->BetterLeftOperand();
1427  HValue* right = instr->BetterRightOperand();
1428  LOperand* left_op;
1429  LOperand* right_op;
1430  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1431  bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1432 
1433  if (right->IsConstant()) {
1434  HConstant* constant = HConstant::cast(right);
1435  int32_t constant_value = constant->Integer32Value();
1436  // Constants -1, 0 and 1 can be optimized even if the result can overflow.
1437  // Other constants can be optimized only when overflow cannot occur.
1438  if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
1439  left_op = UseRegisterAtStart(left);
1440  right_op = UseConstant(right);
1441  } else {
1442  if (bailout_on_minus_zero) {
1443  left_op = UseRegister(left);
1444  } else {
1445  left_op = UseRegisterAtStart(left);
1446  }
1447  right_op = UseRegister(right);
1448  }
1449  } else {
1450  if (bailout_on_minus_zero) {
1451  left_op = UseRegister(left);
1452  } else {
1453  left_op = UseRegisterAtStart(left);
1454  }
1455  right_op = UseRegister(right);
1456  }
1457  LMulI* mul = new(zone()) LMulI(left_op, right_op);
1458  if (can_overflow || bailout_on_minus_zero) {
1459  AssignEnvironment(mul);
1460  }
1461  return DefineAsRegister(mul);
1462 
1463  } else if (instr->representation().IsDouble()) {
1464  if (kArchVariant == kMips32r2) {
1465  if (instr->UseCount() == 1 && instr->uses().value()->IsAdd()) {
1466  HAdd* add = HAdd::cast(instr->uses().value());
1467  if (instr == add->left()) {
1468  // This mul is the lhs of an add. The add and mul will be folded
1469  // into a multiply-add.
1470  return NULL;
1471  }
1472  if (instr == add->right() && !add->left()->IsMul()) {
1473  // This mul is the rhs of an add, where the lhs is not another mul.
1474  // The add and mul will be folded into a multiply-add.
1475  return NULL;
1476  }
1477  }
1478  }
1479  return DoArithmeticD(Token::MUL, instr);
1480  } else {
1481  return DoArithmeticT(Token::MUL, instr);
1482  }
1483 }
1484 
1485 
1486 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1487  if (instr->representation().IsSmiOrInteger32()) {
1488  ASSERT(instr->left()->representation().Equals(instr->representation()));
1489  ASSERT(instr->right()->representation().Equals(instr->representation()));
1490  LOperand* left = UseRegisterAtStart(instr->left());
1491  LOperand* right = UseOrConstantAtStart(instr->right());
1492  LSubI* sub = new(zone()) LSubI(left, right);
1493  LInstruction* result = DefineAsRegister(sub);
1494  if (instr->CheckFlag(HValue::kCanOverflow)) {
1495  result = AssignEnvironment(result);
1496  }
1497  return result;
1498  } else if (instr->representation().IsDouble()) {
1499  return DoArithmeticD(Token::SUB, instr);
1500  } else {
1501  return DoArithmeticT(Token::SUB, instr);
1502  }
1503 }
1504 
1505 
1506 LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
1507  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1508  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1509  LOperand* addend_op = UseRegisterAtStart(addend);
1510  return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
1511  multiplicand_op));
1512 }
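// DoMultiplyAdd is only reached on MIPS32R2 (see DoMul/DoAdd), where the
// mul/add pair can presumably be emitted as a single madd.d instruction; the
// mul half of the pair therefore compiles to nothing on its own.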
1513 
1514 
1515 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1516  if (instr->representation().IsSmiOrInteger32()) {
1517  ASSERT(instr->left()->representation().Equals(instr->representation()));
1518  ASSERT(instr->right()->representation().Equals(instr->representation()));
1519  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1520  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1521  LAddI* add = new(zone()) LAddI(left, right);
1522  LInstruction* result = DefineAsRegister(add);
1523  if (instr->CheckFlag(HValue::kCanOverflow)) {
1524  result = AssignEnvironment(result);
1525  }
1526  return result;
1527  } else if (instr->representation().IsExternal()) {
1528  ASSERT(instr->left()->representation().IsExternal());
1529  ASSERT(instr->right()->representation().IsInteger32());
1530  ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1531  LOperand* left = UseRegisterAtStart(instr->left());
1532  LOperand* right = UseOrConstantAtStart(instr->right());
1533  LAddI* add = new(zone()) LAddI(left, right);
1534  LInstruction* result = DefineAsRegister(add);
1535  return result;
1536  } else if (instr->representation().IsDouble()) {
1537  if (kArchVariant == kMips32r2) {
1538  if (instr->left()->IsMul())
1539  return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());
1540 
1541  if (instr->right()->IsMul()) {
1542  ASSERT(!instr->left()->IsMul());
1543  return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
1544  }
1545  }
1546  return DoArithmeticD(Token::ADD, instr);
1547  } else {
1548  return DoArithmeticT(Token::ADD, instr);
1549  }
1550 }
1551 
1552 
1553 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1554  LOperand* left = NULL;
1555  LOperand* right = NULL;
1556  if (instr->representation().IsSmiOrInteger32()) {
1557  ASSERT(instr->left()->representation().Equals(instr->representation()));
1558  ASSERT(instr->right()->representation().Equals(instr->representation()));
1559  left = UseRegisterAtStart(instr->BetterLeftOperand());
1560  right = UseOrConstantAtStart(instr->BetterRightOperand());
1561  } else {
1562  ASSERT(instr->representation().IsDouble());
1563  ASSERT(instr->left()->representation().IsDouble());
1564  ASSERT(instr->right()->representation().IsDouble());
1565  left = UseRegisterAtStart(instr->left());
1566  right = UseRegisterAtStart(instr->right());
1567  }
1568  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1569 }
1570 
1571 
1572 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1573  ASSERT(instr->representation().IsDouble());
1574  // We call a C function for double power. It can't trigger a GC.
1575  // We need to use a fixed result register for the call.
1576  Representation exponent_type = instr->right()->representation();
1577  ASSERT(instr->left()->representation().IsDouble());
1578  LOperand* left = UseFixedDouble(instr->left(), f2);
1579  LOperand* right = exponent_type.IsDouble() ?
1580  UseFixedDouble(instr->right(), f4) :
1581  UseFixed(instr->right(), a2);
1582  LPower* result = new(zone()) LPower(left, right);
1583  return MarkAsCall(DefineFixedDouble(result, f0),
1584  instr,
1585  CAN_DEOPTIMIZE_EAGERLY);
1586 }
1587 
1588 
1589 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1590  ASSERT(instr->left()->representation().IsTagged());
1591  ASSERT(instr->right()->representation().IsTagged());
1592  LOperand* context = UseFixed(instr->context(), cp);
1593  LOperand* left = UseFixed(instr->left(), a1);
1594  LOperand* right = UseFixed(instr->right(), a0);
1595  LCmpT* result = new(zone()) LCmpT(context, left, right);
1596  return MarkAsCall(DefineFixed(result, v0), instr);
1597 }
1598 
1599 
1600 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1601  HCompareNumericAndBranch* instr) {
1602  Representation r = instr->representation();
1603  if (r.IsSmiOrInteger32()) {
1604  ASSERT(instr->left()->representation().Equals(r));
1605  ASSERT(instr->right()->representation().Equals(r));
1606  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1607  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1608  return new(zone()) LCompareNumericAndBranch(left, right);
1609  } else {
1610  ASSERT(r.IsDouble());
1611  ASSERT(instr->left()->representation().IsDouble());
1612  ASSERT(instr->right()->representation().IsDouble());
1613  LOperand* left = UseRegisterAtStart(instr->left());
1614  LOperand* right = UseRegisterAtStart(instr->right());
1615  return new(zone()) LCompareNumericAndBranch(left, right);
1616  }
1617 }
1618 
1619 
1620 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1621  HCompareObjectEqAndBranch* instr) {
1622  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1623  if (goto_instr != NULL) return goto_instr;
1624  LOperand* left = UseRegisterAtStart(instr->left());
1625  LOperand* right = UseRegisterAtStart(instr->right());
1626  return new(zone()) LCmpObjectEqAndBranch(left, right);
1627 }
1628 
1629 
1630 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1631  HCompareHoleAndBranch* instr) {
1632  LOperand* value = UseRegisterAtStart(instr->value());
1633  return new(zone()) LCmpHoleAndBranch(value);
1634 }
1635 
1636 
1637 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1638  HCompareMinusZeroAndBranch* instr) {
1639  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1640  if (goto_instr != NULL) return goto_instr;
1641  LOperand* value = UseRegister(instr->value());
1642  LOperand* scratch = TempRegister();
1643  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1644 }
1645 
1646 
1647 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1648  ASSERT(instr->value()->representation().IsTagged());
1649  LOperand* temp = TempRegister();
1650  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
1651  temp);
1652 }
1653 
1654 
1655 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1656  ASSERT(instr->value()->representation().IsTagged());
1657  LOperand* temp = TempRegister();
1658  return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
1659  temp);
1660 }
1661 
1662 
1663 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1664  ASSERT(instr->value()->representation().IsTagged());
1665  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1666 }
1667 
1668 
1669 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1670  HIsUndetectableAndBranch* instr) {
1671  ASSERT(instr->value()->representation().IsTagged());
1672  return new(zone()) LIsUndetectableAndBranch(
1673  UseRegisterAtStart(instr->value()), TempRegister());
1674 }
1675 
1676 
1677 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1678  HStringCompareAndBranch* instr) {
1679  ASSERT(instr->left()->representation().IsTagged());
1680  ASSERT(instr->right()->representation().IsTagged());
1681  LOperand* context = UseFixed(instr->context(), cp);
1682  LOperand* left = UseFixed(instr->left(), a1);
1683  LOperand* right = UseFixed(instr->right(), a0);
1684  LStringCompareAndBranch* result =
1685  new(zone()) LStringCompareAndBranch(context, left, right);
1686  return MarkAsCall(result, instr);
1687 }
1688 
1689 
1690 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1691  HHasInstanceTypeAndBranch* instr) {
1692  ASSERT(instr->value()->representation().IsTagged());
1693  LOperand* value = UseRegisterAtStart(instr->value());
1694  return new(zone()) LHasInstanceTypeAndBranch(value);
1695 }
1696 
1697 
1698 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1699  HGetCachedArrayIndex* instr) {
1700  ASSERT(instr->value()->representation().IsTagged());
1701  LOperand* value = UseRegisterAtStart(instr->value());
1702 
1703  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1704 }
1705 
1706 
1707 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1708  HHasCachedArrayIndexAndBranch* instr) {
1709  ASSERT(instr->value()->representation().IsTagged());
1710  return new(zone()) LHasCachedArrayIndexAndBranch(
1711  UseRegisterAtStart(instr->value()));
1712 }
1713 
1714 
1715 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1716  HClassOfTestAndBranch* instr) {
1717  ASSERT(instr->value()->representation().IsTagged());
1718  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
1719  TempRegister());
1720 }
1721 
1722 
1723 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1724  LOperand* map = UseRegisterAtStart(instr->value());
1725  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1726 }
1727 
1728 
1729 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1730  LOperand* object = UseFixed(instr->value(), a0);
1731  LDateField* result =
1732  new(zone()) LDateField(object, FixedTemp(a1), instr->index());
1733  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
1734 }
1735 
1736 
1737 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1738  LOperand* string = UseRegisterAtStart(instr->string());
1739  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1740  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1741 }
1742 
1743 
1744 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1745  LOperand* string = UseRegisterAtStart(instr->string());
1746  LOperand* index = FLAG_debug_code
1747  ? UseRegisterAtStart(instr->index())
1748  : UseRegisterOrConstantAtStart(instr->index());
1749  LOperand* value = UseRegisterAtStart(instr->value());
1750  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
1751  return new(zone()) LSeqStringSetChar(context, string, index, value);
1752 }
1753 
1754 
1755 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1756  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1757  LOperand* length = UseRegister(instr->length());
1758  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1759 }
1760 
1761 
1762 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1763  HBoundsCheckBaseIndexInformation* instr) {
1764  UNREACHABLE();
1765  return NULL;
1766 }
1767 
1768 
1769 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1770  // The control instruction marking the end of a block that completed
1771  // abruptly (e.g., threw an exception). There is nothing specific to do.
1772  return NULL;
1773 }
1774 
1775 
1776 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1777  return NULL;
1778 }
1779 
1780 
1781 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1782  // All HForceRepresentation instructions should be eliminated in the
1783  // representation change phase of Hydrogen.
1784  UNREACHABLE();
1785  return NULL;
1786 }
1787 
1788 
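// DoChange lowers representation changes (HChange). In broad terms: smi or
// tagged inputs that must become untagged values get instructions that can
// deoptimize when the input does not match the target representation (hence
// AssignEnvironment), while conversions that may allocate a heap number on
// the way to a tagged result (double->tagged, int32->tagged with possible
// overflow) are treated as deferred calls and get a pointer map.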
1789 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1790  Representation from = instr->from();
1791  Representation to = instr->to();
1792  if (from.IsSmi()) {
1793  if (to.IsTagged()) {
1794  LOperand* value = UseRegister(instr->value());
1795  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1796  }
1797  from = Representation::Tagged();
1798  }
1799  if (from.IsTagged()) {
1800  if (to.IsDouble()) {
1801  LOperand* value = UseRegister(instr->value());
1802  LNumberUntagD* res = new(zone()) LNumberUntagD(value);
1803  return AssignEnvironment(DefineAsRegister(res));
1804  } else if (to.IsSmi()) {
1805  HValue* val = instr->value();
1806  LOperand* value = UseRegister(val);
1807  if (val->type().IsSmi()) {
1808  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1809  }
1810  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1811  } else {
1812  ASSERT(to.IsInteger32());
1813  LOperand* value = NULL;
1814  LInstruction* res = NULL;
1815  HValue* val = instr->value();
1816  if (val->type().IsSmi() || val->representation().IsSmi()) {
1817  value = UseRegisterAtStart(val);
1818  res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1819  } else {
1820  value = UseRegister(val);
1821  LOperand* temp1 = TempRegister();
1822  LOperand* temp2 = FixedTemp(f22);
1823  res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
1824  temp1,
1825  temp2));
1826  res = AssignEnvironment(res);
1827  }
1828  return res;
1829  }
1830  } else if (from.IsDouble()) {
1831  if (to.IsTagged()) {
1832  info()->MarkAsDeferredCalling();
1833  LOperand* value = UseRegister(instr->value());
1834  LOperand* temp1 = TempRegister();
1835  LOperand* temp2 = TempRegister();
1836 
1837  // Make sure that the temp and result_temp registers are
1838  // different.
1839  LUnallocated* result_temp = TempRegister();
1840  LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1841  Define(result, result_temp);
1842  return AssignPointerMap(result);
1843  } else if (to.IsSmi()) {
1844  LOperand* value = UseRegister(instr->value());
1845  return AssignEnvironment(
1846  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1847  } else {
1848  ASSERT(to.IsInteger32());
1849  LOperand* value = UseRegister(instr->value());
1850  LDoubleToI* res = new(zone()) LDoubleToI(value);
1851  return AssignEnvironment(DefineAsRegister(res));
1852  }
1853  } else if (from.IsInteger32()) {
1854  info()->MarkAsDeferredCalling();
1855  if (to.IsTagged()) {
1856  HValue* val = instr->value();
1857  LOperand* value = UseRegisterAtStart(val);
1858  if (!instr->CheckFlag(HValue::kCanOverflow)) {
1859  return DefineAsRegister(new(zone()) LSmiTag(value));
1860  } else if (val->CheckFlag(HInstruction::kUint32)) {
1861  LOperand* temp1 = TempRegister();
1862  LOperand* temp2 = TempRegister();
1863  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1864  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1865  } else {
1866  LOperand* temp1 = TempRegister();
1867  LOperand* temp2 = TempRegister();
1868  LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2);
1869  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1870  }
1871  } else if (to.IsSmi()) {
1872  HValue* val = instr->value();
1873  LOperand* value = UseRegister(val);
1874  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1875  if (instr->CheckFlag(HValue::kCanOverflow)) {
1876  result = AssignEnvironment(result);
1877  }
1878  return result;
1879  } else {
1880  ASSERT(to.IsDouble());
1881  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1882  return DefineAsRegister(
1883  new(zone()) LUint32ToDouble(UseRegister(instr->value())));
1884  } else {
1885  return DefineAsRegister(
1886  new(zone()) LInteger32ToDouble(Use(instr->value())));
1887  }
1888  }
1889  }
1890  UNREACHABLE();
1891  return NULL;
1892 }
1893 
1894 
1895 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1896  LOperand* value = UseRegisterAtStart(instr->value());
1897  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1898 }
1899 
1900 
1901 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1902  LOperand* value = UseRegisterAtStart(instr->value());
1903  return AssignEnvironment(new(zone()) LCheckSmi(value));
1904 }
1905 
1906 
1907 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1908  LOperand* value = UseRegisterAtStart(instr->value());
1909  LInstruction* result = new(zone()) LCheckInstanceType(value);
1910  return AssignEnvironment(result);
1911 }
1912 
1913 
1914 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1915  LOperand* value = UseRegisterAtStart(instr->value());
1916  return AssignEnvironment(new(zone()) LCheckValue(value));
1917 }
1918 
1919 
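// Map checks are dropped entirely when the hydrogen instruction says they can
// be omitted. Otherwise the check gets a deoptimization environment, and if a
// migration target is present it may also call out to migrate the instance,
// which is why it is then marked as a deferred call and given a pointer map.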
1920 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1921  LOperand* value = NULL;
1922  if (!instr->CanOmitMapChecks()) {
1923  value = UseRegisterAtStart(instr->value());
1924  if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
1925  }
1926  LCheckMaps* result = new(zone()) LCheckMaps(value);
1927  if (!instr->CanOmitMapChecks()) {
1928  AssignEnvironment(result);
1929  if (instr->has_migration_target()) return AssignPointerMap(result);
1930  }
1931  return result;
1932 }
1933 
1934 
1935 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1936  HValue* value = instr->value();
1937  Representation input_rep = value->representation();
1938  LOperand* reg = UseRegister(value);
1939  if (input_rep.IsDouble()) {
1940  // Revisit this decision, here and 8 lines below.
1941  return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
1942  } else if (input_rep.IsInteger32()) {
1943  return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1944  } else {
1945  ASSERT(input_rep.IsSmiOrTagged());
1946  // Register allocator doesn't (yet) support allocation of double
1947  // temps. Reserve f22 explicitly.
1948  LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
1949  return AssignEnvironment(DefineAsRegister(result));
1950  }
1951 }
1952 
1953 
1954 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
1955  HValue* value = instr->value();
1956  ASSERT(value->representation().IsDouble());
1957  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
1958 }
1959 
1960 
1961 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
1962  LOperand* lo = UseRegister(instr->lo());
1963  LOperand* hi = UseRegister(instr->hi());
1964  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
1965 }
1966 
1967 
1968 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1969  LOperand* context = info()->IsStub()
1970  ? UseFixed(instr->context(), cp)
1971  : NULL;
1972  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
1973  return new(zone()) LReturn(UseFixed(instr->value(), v0), context,
1974  parameter_count);
1975 }
1976 
1977 
1978 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1979  Representation r = instr->representation();
1980  if (r.IsSmi()) {
1981  return DefineAsRegister(new(zone()) LConstantS);
1982  } else if (r.IsInteger32()) {
1983  return DefineAsRegister(new(zone()) LConstantI);
1984  } else if (r.IsDouble()) {
1985  return DefineAsRegister(new(zone()) LConstantD);
1986  } else if (r.IsExternal()) {
1987  return DefineAsRegister(new(zone()) LConstantE);
1988  } else if (r.IsTagged()) {
1989  return DefineAsRegister(new(zone()) LConstantT);
1990  } else {
1991  UNREACHABLE();
1992  return NULL;
1993  }
1994 }
1995 
1996 
1997 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1998  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1999  return instr->RequiresHoleCheck()
2000  ? AssignEnvironment(DefineAsRegister(result))
2001  : DefineAsRegister(result);
2002 }
2003 
2004 
2005 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2006  LOperand* context = UseFixed(instr->context(), cp);
2007  LOperand* global_object = UseFixed(instr->global_object(), a0);
2008  LLoadGlobalGeneric* result =
2009  new(zone()) LLoadGlobalGeneric(context, global_object);
2010  return MarkAsCall(DefineFixed(result, v0), instr);
2011 }
2012 
2013 
2014 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2015  LOperand* value = UseRegister(instr->value());
2016  // Use a temp to check the value in the cell in the case where we perform
2017  // a hole check.
2018  return instr->RequiresHoleCheck()
2019  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2020  : new(zone()) LStoreGlobalCell(value, NULL);
2021 }
2022 
2023 
2024 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2025  LOperand* context = UseRegisterAtStart(instr->value());
2026  LInstruction* result =
2027  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2028  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2029 }
2030 
2031 
2032 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2033  LOperand* context;
2034  LOperand* value;
2035  if (instr->NeedsWriteBarrier()) {
2036  context = UseTempRegister(instr->context());
2037  value = UseTempRegister(instr->value());
2038  } else {
2039  context = UseRegister(instr->context());
2040  value = UseRegister(instr->value());
2041  }
2042  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
2043  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2044 }
2045 
2046 
2047 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2048  LOperand* obj = UseRegisterAtStart(instr->object());
2049  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2050 }
2051 
2052 
2053 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2054  LOperand* context = UseFixed(instr->context(), cp);
2055  LOperand* object = UseFixed(instr->object(), a0);
2056  LInstruction* result =
2057  DefineFixed(new(zone()) LLoadNamedGeneric(context, object), v0);
2058  return MarkAsCall(result, instr);
2059 }
2060 
2061 
2062 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2063  HLoadFunctionPrototype* instr) {
2064  return AssignEnvironment(DefineAsRegister(
2065  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2066 }
2067 
2068 
2069 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2070  return DefineAsRegister(new(zone()) LLoadRoot);
2071 }
2072 
2073 
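// Keyed loads come in two flavours: loads from FixedArray/FixedDoubleArray
// backing stores and loads from external or fixed typed arrays. Loads that
// require a hole check, and uint32 loads whose value may not fit in a signed
// int32, are given an environment so they can deoptimize.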
2074 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2075  ASSERT(instr->key()->representation().IsSmiOrInteger32());
2076  ElementsKind elements_kind = instr->elements_kind();
2077  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2078  LLoadKeyed* result = NULL;
2079 
2080  if (!instr->is_typed_elements()) {
2081  LOperand* obj = NULL;
2082  if (instr->representation().IsDouble()) {
2083  obj = UseRegister(instr->elements());
2084  } else {
2085  ASSERT(instr->representation().IsSmiOrTagged());
2086  obj = UseRegisterAtStart(instr->elements());
2087  }
2088  result = new(zone()) LLoadKeyed(obj, key);
2089  } else {
2090  ASSERT(
2091  (instr->representation().IsInteger32() &&
2092  !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2093  (instr->representation().IsDouble() &&
2094  IsDoubleOrFloatElementsKind(instr->elements_kind())));
2095  LOperand* backing_store = UseRegister(instr->elements());
2096  result = new(zone()) LLoadKeyed(backing_store, key);
2097  }
2098 
2099  DefineAsRegister(result);
2100  // An unsigned int array load might overflow and cause a deopt; make sure it
2101  // has an environment.
2102  bool can_deoptimize = instr->RequiresHoleCheck() ||
2103  elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2104  elements_kind == UINT32_ELEMENTS;
2105  return can_deoptimize ? AssignEnvironment(result) : result;
2106 }
2107 
2108 
2109 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2110  LOperand* context = UseFixed(instr->context(), cp);
2111  LOperand* object = UseFixed(instr->object(), a1);
2112  LOperand* key = UseFixed(instr->key(), a0);
2113 
2114  LInstruction* result =
2115  DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), v0);
2116  return MarkAsCall(result, instr);
2117 }
2118 
2119 
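// For stores into ordinary element backing stores that need a write barrier,
// object, value and key are all placed in registers the instruction may
// clobber, since the record-write barrier overwrites the registers it is
// given. Typed-array and external-array stores write straight to the backing
// store and never need a barrier.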
2120 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2121  if (!instr->is_typed_elements()) {
2122  ASSERT(instr->elements()->representation().IsTagged());
2123  bool needs_write_barrier = instr->NeedsWriteBarrier();
2124  LOperand* object = NULL;
2125  LOperand* val = NULL;
2126  LOperand* key = NULL;
2127 
2128  if (instr->value()->representation().IsDouble()) {
2129  object = UseRegisterAtStart(instr->elements());
2130  key = UseRegisterOrConstantAtStart(instr->key());
2131  val = UseRegister(instr->value());
2132  } else {
2133  ASSERT(instr->value()->representation().IsSmiOrTagged());
2134  if (needs_write_barrier) {
2135  object = UseTempRegister(instr->elements());
2136  val = UseTempRegister(instr->value());
2137  key = UseTempRegister(instr->key());
2138  } else {
2139  object = UseRegisterAtStart(instr->elements());
2140  val = UseRegisterAtStart(instr->value());
2141  key = UseRegisterOrConstantAtStart(instr->key());
2142  }
2143  }
2144 
2145  return new(zone()) LStoreKeyed(object, key, val);
2146  }
2147 
2148  ASSERT(
2149  (instr->value()->representation().IsInteger32() &&
2150  !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2151  (instr->value()->representation().IsDouble() &&
2152  IsDoubleOrFloatElementsKind(instr->elements_kind())));
2153  ASSERT((instr->is_fixed_typed_array() &&
2154  instr->elements()->representation().IsTagged()) ||
2155  (instr->is_external() &&
2156  instr->elements()->representation().IsExternal()));
2157  LOperand* val = UseRegister(instr->value());
2158  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2159  LOperand* backing_store = UseRegister(instr->elements());
2160  return new(zone()) LStoreKeyed(backing_store, key, val);
2161 }
2162 
2163 
2164 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2165  LOperand* context = UseFixed(instr->context(), cp);
2166  LOperand* obj = UseFixed(instr->object(), a2);
2167  LOperand* key = UseFixed(instr->key(), a1);
2168  LOperand* val = UseFixed(instr->value(), a0);
2169 
2170  ASSERT(instr->object()->representation().IsTagged());
2171  ASSERT(instr->key()->representation().IsTagged());
2172  ASSERT(instr->value()->representation().IsTagged());
2173 
2174  return MarkAsCall(
2175  new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
2176 }
2177 
2178 
2179 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2180  HTransitionElementsKind* instr) {
2181  LOperand* object = UseRegister(instr->object());
2182  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2183  LOperand* new_map_reg = TempRegister();
2184  LTransitionElementsKind* result =
2185  new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
2186  return result;
2187  } else {
2188  LOperand* context = UseFixed(instr->context(), cp);
2189  LTransitionElementsKind* result =
2190  new(zone()) LTransitionElementsKind(object, context, NULL);
2191  return AssignPointerMap(result);
2192  }
2193 }
2194 
2195 
2196 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2197  HTrapAllocationMemento* instr) {
2198  LOperand* object = UseRegister(instr->object());
2199  LOperand* temp = TempRegister();
2200  LTrapAllocationMemento* result =
2201  new(zone()) LTrapAllocationMemento(object, temp);
2202  return AssignEnvironment(result);
2203 }
2204 
2205 
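// Named field stores pick operand policies based on the write barriers they
// may need: a barrier on the value forces object and value into clobberable
// registers, a map transition that needs a barrier reserves a temp register,
// and storing into a field declared as HeapObject deoptimizes if the value
// might still be a smi.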
2206 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2207  bool is_in_object = instr->access().IsInobject();
2208  bool needs_write_barrier = instr->NeedsWriteBarrier();
2209  bool needs_write_barrier_for_map = instr->has_transition() &&
2210  instr->NeedsWriteBarrierForMap();
2211 
2212  LOperand* obj;
2213  if (needs_write_barrier) {
2214  obj = is_in_object
2215  ? UseRegister(instr->object())
2216  : UseTempRegister(instr->object());
2217  } else {
2218  obj = needs_write_barrier_for_map
2219  ? UseRegister(instr->object())
2220  : UseRegisterAtStart(instr->object());
2221  }
2222 
2223  LOperand* val;
2224  if (needs_write_barrier || instr->field_representation().IsSmi()) {
2225  val = UseTempRegister(instr->value());
2226  } else if (instr->field_representation().IsDouble()) {
2227  val = UseRegisterAtStart(instr->value());
2228  } else {
2229  val = UseRegister(instr->value());
2230  }
2231 
2232  // We need a temporary register for the write barrier of the map field.
2233  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
2234 
2235  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
2236  if (instr->field_representation().IsHeapObject()) {
2237  if (!instr->value()->type().IsHeapObject()) {
2238  return AssignEnvironment(result);
2239  }
2240  }
2241  return result;
2242 }
2243 
2244 
2245 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2246  LOperand* context = UseFixed(instr->context(), cp);
2247  LOperand* obj = UseFixed(instr->object(), a1);
2248  LOperand* val = UseFixed(instr->value(), a0);
2249 
2250  LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
2251  return MarkAsCall(result, instr);
2252 }
2253 
2254 
2255 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2256  LOperand* context = UseFixed(instr->context(), cp);
2257  LOperand* left = UseFixed(instr->left(), a1);
2258  LOperand* right = UseFixed(instr->right(), a0);
2259  return MarkAsCall(
2260  DefineFixed(new(zone()) LStringAdd(context, left, right), v0),
2261  instr);
2262 }
2263 
2264 
2265 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2266  LOperand* string = UseTempRegister(instr->string());
2267  LOperand* index = UseTempRegister(instr->index());
2268  LOperand* context = UseAny(instr->context());
2269  LStringCharCodeAt* result =
2270  new(zone()) LStringCharCodeAt(context, string, index);
2271  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2272 }
2273 
2274 
2275 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2276  LOperand* char_code = UseRegister(instr->value());
2277  LOperand* context = UseAny(instr->context());
2278  LStringCharFromCode* result =
2279  new(zone()) LStringCharFromCode(context, char_code);
2280  return AssignPointerMap(DefineAsRegister(result));
2281 }
2282 
2283 
2284 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2285  info()->MarkAsDeferredCalling();
2286  LOperand* context = UseAny(instr->context());
2287  LOperand* size = instr->size()->IsConstant()
2288  ? UseConstant(instr->size())
2289  : UseTempRegister(instr->size());
2290  LOperand* temp1 = TempRegister();
2291  LOperand* temp2 = TempRegister();
2292  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
2293  return AssignPointerMap(DefineAsRegister(result));
2294 }
2295 
2296 
2297 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2298  LOperand* context = UseFixed(instr->context(), cp);
2299  return MarkAsCall(
2300  DefineFixed(new(zone()) LRegExpLiteral(context), v0), instr);
2301 }
2302 
2303 
2304 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2305  LOperand* context = UseFixed(instr->context(), cp);
2306  return MarkAsCall(
2307  DefineFixed(new(zone()) LFunctionLiteral(context), v0), instr);
2308 }
2309 
2310 
2311 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2312  ASSERT(argument_count_ == 0);
2313  allocator_->MarkAsOsrEntry();
2314  current_block_->last_environment()->set_ast_id(instr->ast_id());
2315  return AssignEnvironment(new(zone()) LOsrEntry);
2316 }
2317 
2318 
2319 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2320  LParameter* result = new(zone()) LParameter;
2321  if (instr->kind() == HParameter::STACK_PARAMETER) {
2322  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2323  return DefineAsSpilled(result, spill_index);
2324  } else {
2325  ASSERT(info()->IsStub());
2326  CodeStubInterfaceDescriptor* descriptor =
2327  info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2328  int index = static_cast<int>(instr->index());
2329  Register reg = descriptor->GetParameterRegister(index);
2330  return DefineFixed(result, reg);
2331  }
2332 }
2333 
2334 
2335 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2336  // Use an index that corresponds to the location in the unoptimized frame,
2337  // which the optimized frame will subsume.
2338  int env_index = instr->index();
2339  int spill_index = 0;
2340  if (instr->environment()->is_parameter_index(env_index)) {
2341  spill_index = chunk()->GetParameterStackSlot(env_index);
2342  } else {
2343  spill_index = env_index - instr->environment()->first_local_index();
2344  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2345  Abort(kTooManySpillSlotsNeededForOSR);
2346  spill_index = 0;
2347  }
2348  }
2349  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2350 }
2351 
2352 
2353 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2354  LOperand* context = UseFixed(instr->context(), cp);
2355  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), v0), instr);
2356 }
2357 
2358 
2359 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2360  // There are no real uses of the arguments object.
2361  // arguments.length and element access are supported directly on
2362  // stack arguments, and any real arguments object use causes a bailout.
2363  // So this value is never used.
2364  return NULL;
2365 }
2366 
2367 
2368 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2369  instr->ReplayEnvironment(current_block_->last_environment());
2370 
2371  // There are no real uses of a captured object.
2372  return NULL;
2373 }
2374 
2375 
2376 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2377  info()->MarkAsRequiresFrame();
2378  LOperand* args = UseRegister(instr->arguments());
2379  LOperand* length = UseRegisterOrConstantAtStart(instr->length());
2380  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2381  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2382 }
2383 
2384 
2385 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2386  LOperand* object = UseFixed(instr->value(), a0);
2387  LToFastProperties* result = new(zone()) LToFastProperties(object);
2388  return MarkAsCall(DefineFixed(result, v0), instr);
2389 }
2390 
2391 
2392 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2393  LOperand* context = UseFixed(instr->context(), cp);
2394  LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), a0));
2395  return MarkAsCall(DefineFixed(result, v0), instr);
2396 }
2397 
2398 
2399 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2400  LInstruction* goto_instr = CheckElideControlInstruction(instr);
2401  if (goto_instr != NULL) return goto_instr;
2402 
2403  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2404 }
2405 
2406 
2407 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2408  HIsConstructCallAndBranch* instr) {
2409  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2410 }
2411 
2412 
2413 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2414  instr->ReplayEnvironment(current_block_->last_environment());
2415  return NULL;
2416 }
2417 
2418 
2419 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2420  if (instr->is_function_entry()) {
2421  LOperand* context = UseFixed(instr->context(), cp);
2422  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2423  } else {
2424  ASSERT(instr->is_backwards_branch());
2425  LOperand* context = UseAny(instr->context());
2426  return AssignEnvironment(
2427  AssignPointerMap(new(zone()) LStackCheck(context)));
2428  }
2429 }
2430 
2431 
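// Entering and leaving an inlined function is pure environment bookkeeping:
// DoEnterInlined builds the inner environment for the inlined frame (binding
// the arguments object only if it is still in the graph), and DoLeaveInlined
// emits an LDrop for any pushed arguments before restoring the outer
// environment. Neither produces a value.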
2432 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2433  HEnvironment* outer = current_block_->last_environment();
2434  HConstant* undefined = graph()->GetConstantUndefined();
2435  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2436  instr->arguments_count(),
2437  instr->function(),
2438  undefined,
2439  instr->inlining_kind());
2440  // Only replay binding of arguments object if it wasn't removed from graph.
2441  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2442  inner->Bind(instr->arguments_var(), instr->arguments_object());
2443  }
2444  inner->set_entry(instr);
2445  current_block_->UpdateEnvironment(inner);
2446  chunk_->AddInlinedClosure(instr->closure());
2447  return NULL;
2448 }
2449 
2450 
2451 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2452  LInstruction* pop = NULL;
2453 
2454  HEnvironment* env = current_block_->last_environment();
2455 
2456  if (env->entry()->arguments_pushed()) {
2457  int argument_count = env->arguments_environment()->parameter_count();
2458  pop = new(zone()) LDrop(argument_count);
2459  ASSERT(instr->argument_delta() == -argument_count);
2460  }
2461 
2462  HEnvironment* outer = current_block_->last_environment()->
2463  DiscardInlined(false);
2464  current_block_->UpdateEnvironment(outer);
2465 
2466  return pop;
2467 }
2468 
2469 
2470 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2471  LOperand* context = UseFixed(instr->context(), cp);
2472  LOperand* object = UseFixed(instr->enumerable(), a0);
2473  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2474  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
2475 }
2476 
2477 
2478 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2479  LOperand* map = UseRegister(instr->map());
2480  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2481 }
2482 
2483 
2484 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2485  LOperand* value = UseRegisterAtStart(instr->value());
2486  LOperand* map = UseRegisterAtStart(instr->map());
2487  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2488 }
2489 
2490 
2491 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2492  LOperand* object = UseRegister(instr->object());
2493  LOperand* index = UseRegister(instr->index());
2494  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2495 }
2496 
2497 
2498 } } // namespace v8::internal