v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium-arm.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "lithium-allocator-inl.h"
31 #include "arm/lithium-arm.h"
33 #include "hydrogen-osr.h"
34 
35 namespace v8 {
36 namespace internal {
37 
38 #define DEFINE_COMPILE(type) \
39  void L##type::CompileToNative(LCodeGen* generator) { \
40  generator->Do##type(this); \
41  }
43 #undef DEFINE_COMPILE
44 
#ifdef DEBUG
void LInstruction::VerifyCall() {
  // A call blocks every register via the calling convention, so:
  //  - the output (if any) must be fixed or not need a register at all,
  //  - each input must be fixed, used-at-start, or register-free,
  //  - each temp must be fixed or register-free.
  if (Output() != NULL) {
    LUnallocated* output = LUnallocated::cast(Output());
    ASSERT(output->HasFixedPolicy() || !output->HasRegisterPolicy());
  }
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* input = LUnallocated::cast(it.Current());
    ASSERT(input->HasFixedPolicy() || input->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* temp = LUnallocated::cast(it.Current());
    ASSERT(temp->HasFixedPolicy() || !temp->HasRegisterPolicy());
  }
}
#endif
65 
66 
68  stream->Add("%s ", this->Mnemonic());
69 
70  PrintOutputOperandTo(stream);
71 
72  PrintDataTo(stream);
73 
74  if (HasEnvironment()) {
75  stream->Add(" ");
76  environment()->PrintTo(stream);
77  }
78 
79  if (HasPointerMap()) {
80  stream->Add(" ");
81  pointer_map()->PrintTo(stream);
82  }
83 }
84 
85 
87  stream->Add("= ");
88  for (int i = 0; i < InputCount(); i++) {
89  if (i > 0) stream->Add(" ");
90  if (InputAt(i) == NULL) {
91  stream->Add("NULL");
92  } else {
93  InputAt(i)->PrintTo(stream);
94  }
95  }
96 }
97 
98 
100  if (HasResult()) result()->PrintTo(stream);
101 }
102 
103 
104 void LLabel::PrintDataTo(StringStream* stream) {
105  LGap::PrintDataTo(stream);
106  LLabel* rep = replacement();
107  if (rep != NULL) {
108  stream->Add(" Dead block replaced with B%d", rep->block_id());
109  }
110 }
111 
112 
113 bool LGap::IsRedundant() const {
114  for (int i = 0; i < 4; i++) {
115  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
116  return false;
117  }
118  }
119 
120  return true;
121 }
122 
123 
125  for (int i = 0; i < 4; i++) {
126  stream->Add("(");
127  if (parallel_moves_[i] != NULL) {
128  parallel_moves_[i]->PrintDataTo(stream);
129  }
130  stream->Add(") ");
131  }
132 }
133 
134 
135 const char* LArithmeticD::Mnemonic() const {
136  switch (op()) {
137  case Token::ADD: return "add-d";
138  case Token::SUB: return "sub-d";
139  case Token::MUL: return "mul-d";
140  case Token::DIV: return "div-d";
141  case Token::MOD: return "mod-d";
142  default:
143  UNREACHABLE();
144  return NULL;
145  }
146 }
147 
148 
149 const char* LArithmeticT::Mnemonic() const {
150  switch (op()) {
151  case Token::ADD: return "add-t";
152  case Token::SUB: return "sub-t";
153  case Token::MUL: return "mul-t";
154  case Token::MOD: return "mod-t";
155  case Token::DIV: return "div-t";
156  case Token::BIT_AND: return "bit-and-t";
157  case Token::BIT_OR: return "bit-or-t";
158  case Token::BIT_XOR: return "bit-xor-t";
159  case Token::ROR: return "ror-t";
160  case Token::SHL: return "shl-t";
161  case Token::SAR: return "sar-t";
162  case Token::SHR: return "shr-t";
163  default:
164  UNREACHABLE();
165  return NULL;
166  }
167 }
168 
169 
170 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
171  return !gen->IsNextEmittedBlock(block_id());
172 }
173 
174 
175 void LGoto::PrintDataTo(StringStream* stream) {
176  stream->Add("B%d", block_id());
177 }
178 
179 
180 void LBranch::PrintDataTo(StringStream* stream) {
181  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
182  value()->PrintTo(stream);
183 }
184 
185 
186 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
187  stream->Add("if ");
188  left()->PrintTo(stream);
189  stream->Add(" %s ", Token::String(op()));
190  right()->PrintTo(stream);
191  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
192 }
193 
194 
195 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
196  stream->Add("if is_object(");
197  value()->PrintTo(stream);
198  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
199 }
200 
201 
202 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
203  stream->Add("if is_string(");
204  value()->PrintTo(stream);
205  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
206 }
207 
208 
209 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
210  stream->Add("if is_smi(");
211  value()->PrintTo(stream);
212  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
213 }
214 
215 
216 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
217  stream->Add("if is_undetectable(");
218  value()->PrintTo(stream);
219  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
220 }
221 
222 
223 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
224  stream->Add("if string_compare(");
225  left()->PrintTo(stream);
226  right()->PrintTo(stream);
227  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
228 }
229 
230 
231 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
232  stream->Add("if has_instance_type(");
233  value()->PrintTo(stream);
234  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
235 }
236 
237 
238 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
239  stream->Add("if has_cached_array_index(");
240  value()->PrintTo(stream);
241  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
242 }
243 
244 
245 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
246  stream->Add("if class_of_test(");
247  value()->PrintTo(stream);
248  stream->Add(", \"%o\") then B%d else B%d",
249  *hydrogen()->class_name(),
250  true_block_id(),
251  false_block_id());
252 }
253 
254 
255 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
256  stream->Add("if typeof ");
257  value()->PrintTo(stream);
258  stream->Add(" == \"%s\" then B%d else B%d",
259  hydrogen()->type_literal()->ToCString().get(),
260  true_block_id(), false_block_id());
261 }
262 
263 
264 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
265  stream->Add(" = ");
266  function()->PrintTo(stream);
267  stream->Add(".code_entry = ");
268  code_object()->PrintTo(stream);
269 }
270 
271 
272 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
273  stream->Add(" = ");
274  base_object()->PrintTo(stream);
275  stream->Add(" + ");
276  offset()->PrintTo(stream);
277 }
278 
279 
280 void LCallJSFunction::PrintDataTo(StringStream* stream) {
281  stream->Add("= ");
282  function()->PrintTo(stream);
283  stream->Add("#%d / ", arity());
284 }
285 
286 
287 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
288  for (int i = 0; i < InputCount(); i++) {
289  InputAt(i)->PrintTo(stream);
290  stream->Add(" ");
291  }
292  stream->Add("#%d / ", arity());
293 }
294 
295 
296 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
297  context()->PrintTo(stream);
298  stream->Add("[%d]", slot_index());
299 }
300 
301 
302 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
303  context()->PrintTo(stream);
304  stream->Add("[%d] <- ", slot_index());
305  value()->PrintTo(stream);
306 }
307 
308 
309 void LInvokeFunction::PrintDataTo(StringStream* stream) {
310  stream->Add("= ");
311  function()->PrintTo(stream);
312  stream->Add(" #%d / ", arity());
313 }
314 
315 
316 void LCallNew::PrintDataTo(StringStream* stream) {
317  stream->Add("= ");
318  constructor()->PrintTo(stream);
319  stream->Add(" #%d / ", arity());
320 }
321 
322 
323 void LCallNewArray::PrintDataTo(StringStream* stream) {
324  stream->Add("= ");
325  constructor()->PrintTo(stream);
326  stream->Add(" #%d / ", arity());
327  ElementsKind kind = hydrogen()->elements_kind();
328  stream->Add(" (%s) ", ElementsKindToString(kind));
329 }
330 
331 
332 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
333  arguments()->PrintTo(stream);
334  stream->Add(" length ");
335  length()->PrintTo(stream);
336  stream->Add(" index ");
337  index()->PrintTo(stream);
338 }
339 
340 
341 void LStoreNamedField::PrintDataTo(StringStream* stream) {
342  object()->PrintTo(stream);
343  hydrogen()->access().PrintTo(stream);
344  stream->Add(" <- ");
345  value()->PrintTo(stream);
346 }
347 
348 
349 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
350  object()->PrintTo(stream);
351  stream->Add(".");
352  stream->Add(String::cast(*name())->ToCString().get());
353  stream->Add(" <- ");
354  value()->PrintTo(stream);
355 }
356 
357 
358 void LLoadKeyed::PrintDataTo(StringStream* stream) {
359  elements()->PrintTo(stream);
360  stream->Add("[");
361  key()->PrintTo(stream);
362  if (hydrogen()->IsDehoisted()) {
363  stream->Add(" + %d]", additional_index());
364  } else {
365  stream->Add("]");
366  }
367 }
368 
369 
370 void LStoreKeyed::PrintDataTo(StringStream* stream) {
371  elements()->PrintTo(stream);
372  stream->Add("[");
373  key()->PrintTo(stream);
374  if (hydrogen()->IsDehoisted()) {
375  stream->Add(" + %d] <-", additional_index());
376  } else {
377  stream->Add("] <- ");
378  }
379 
380  if (value() == NULL) {
381  ASSERT(hydrogen()->IsConstantHoleStore() &&
382  hydrogen()->value()->representation().IsDouble());
383  stream->Add("<the hole(nan)>");
384  } else {
385  value()->PrintTo(stream);
386  }
387 }
388 
389 
390 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
391  object()->PrintTo(stream);
392  stream->Add("[");
393  key()->PrintTo(stream);
394  stream->Add("] <- ");
395  value()->PrintTo(stream);
396 }
397 
398 
399 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
400  object()->PrintTo(stream);
401  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
402 }
403 
404 
405 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
406  // Skip a slot if for a double-width slot.
407  if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
408  return spill_slot_count_++;
409 }
410 
411 
412 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
413  int index = GetNextSpillIndex(kind);
414  if (kind == DOUBLE_REGISTERS) {
415  return LDoubleStackSlot::Create(index, zone());
416  } else {
417  ASSERT(kind == GENERAL_REGISTERS);
418  return LStackSlot::Create(index, zone());
419  }
420 }
421 
422 
423 LPlatformChunk* LChunkBuilder::Build() {
424  ASSERT(is_unused());
425  chunk_ = new(zone()) LPlatformChunk(info(), graph());
426  LPhase phase("L_Building chunk", chunk_);
427  status_ = BUILDING;
428 
429  // If compiling for OSR, reserve space for the unoptimized frame,
430  // which will be subsumed into this frame.
431  if (graph()->has_osr()) {
432  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
433  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
434  }
435  }
436 
437  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
438  for (int i = 0; i < blocks->length(); i++) {
439  HBasicBlock* next = NULL;
440  if (i < blocks->length() - 1) next = blocks->at(i + 1);
441  DoBasicBlock(blocks->at(i), next);
442  if (is_aborted()) return NULL;
443  }
444  status_ = DONE;
445  return chunk_;
446 }
447 
448 
449 void LChunkBuilder::Abort(BailoutReason reason) {
450  info()->set_bailout_reason(reason);
451  status_ = ABORTED;
452 }
453 
454 
455 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
456  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
458 }
459 
460 
461 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
462  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
464 }
465 
466 
467 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
468  return Use(value, ToUnallocated(fixed_register));
469 }
470 
471 
472 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
473  return Use(value, ToUnallocated(reg));
474 }
475 
476 
477 LOperand* LChunkBuilder::UseRegister(HValue* value) {
478  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
479 }
480 
481 
482 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
483  return Use(value,
484  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
486 }
487 
488 
489 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
490  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
491 }
492 
493 
494 LOperand* LChunkBuilder::Use(HValue* value) {
495  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
496 }
497 
498 
499 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
500  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
502 }
503 
504 
505 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
506  return value->IsConstant()
507  ? chunk_->DefineConstantOperand(HConstant::cast(value))
508  : Use(value);
509 }
510 
511 
512 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
513  return value->IsConstant()
514  ? chunk_->DefineConstantOperand(HConstant::cast(value))
515  : UseAtStart(value);
516 }
517 
518 
519 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
520  return value->IsConstant()
521  ? chunk_->DefineConstantOperand(HConstant::cast(value))
522  : UseRegister(value);
523 }
524 
525 
526 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
527  return value->IsConstant()
528  ? chunk_->DefineConstantOperand(HConstant::cast(value))
529  : UseRegisterAtStart(value);
530 }
531 
532 
533 LOperand* LChunkBuilder::UseConstant(HValue* value) {
534  return chunk_->DefineConstantOperand(HConstant::cast(value));
535 }
536 
537 
538 LOperand* LChunkBuilder::UseAny(HValue* value) {
539  return value->IsConstant()
540  ? chunk_->DefineConstantOperand(HConstant::cast(value))
541  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
542 }
543 
544 
545 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
546  if (value->EmitAtUses()) {
547  HInstruction* instr = HInstruction::cast(value);
548  VisitInstruction(instr);
549  }
550  operand->set_virtual_register(value->id());
551  return operand;
552 }
553 
554 
555 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
556  LUnallocated* result) {
557  result->set_virtual_register(current_instruction_->id());
558  instr->set_result(result);
559  return instr;
560 }
561 
562 
563 LInstruction* LChunkBuilder::DefineAsRegister(
564  LTemplateResultInstruction<1>* instr) {
565  return Define(instr,
566  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
567 }
568 
569 
570 LInstruction* LChunkBuilder::DefineAsSpilled(
571  LTemplateResultInstruction<1>* instr, int index) {
572  return Define(instr,
573  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
574 }
575 
576 
577 LInstruction* LChunkBuilder::DefineSameAsFirst(
578  LTemplateResultInstruction<1>* instr) {
579  return Define(instr,
580  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
581 }
582 
583 
584 LInstruction* LChunkBuilder::DefineFixed(
585  LTemplateResultInstruction<1>* instr, Register reg) {
586  return Define(instr, ToUnallocated(reg));
587 }
588 
589 
590 LInstruction* LChunkBuilder::DefineFixedDouble(
591  LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
592  return Define(instr, ToUnallocated(reg));
593 }
594 
595 
596 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
597  HEnvironment* hydrogen_env = current_block_->last_environment();
598  int argument_index_accumulator = 0;
599  ZoneList<HValue*> objects_to_materialize(0, zone());
600  instr->set_environment(CreateEnvironment(hydrogen_env,
601  &argument_index_accumulator,
602  &objects_to_materialize));
603  return instr;
604 }
605 
606 
607 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
608  HInstruction* hinstr,
609  CanDeoptimize can_deoptimize) {
610  info()->MarkAsNonDeferredCalling();
611 #ifdef DEBUG
612  instr->VerifyCall();
613 #endif
614  instr->MarkAsCall();
615  instr = AssignPointerMap(instr);
616 
617  // If instruction does not have side-effects lazy deoptimization
618  // after the call will try to deoptimize to the point before the call.
619  // Thus we still need to attach environment to this call even if
620  // call sequence can not deoptimize eagerly.
621  bool needs_environment =
622  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
623  !hinstr->HasObservableSideEffects();
624  if (needs_environment && !instr->HasEnvironment()) {
625  instr = AssignEnvironment(instr);
626  }
627 
628  return instr;
629 }
630 
631 
632 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
633  ASSERT(!instr->HasPointerMap());
634  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
635  return instr;
636 }
637 
638 
639 LUnallocated* LChunkBuilder::TempRegister() {
640  LUnallocated* operand =
641  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
642  int vreg = allocator_->GetVirtualRegister();
643  if (!allocator_->AllocationOk()) {
644  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
645  vreg = 0;
646  }
647  operand->set_virtual_register(vreg);
648  return operand;
649 }
650 
651 
652 LOperand* LChunkBuilder::FixedTemp(Register reg) {
653  LUnallocated* operand = ToUnallocated(reg);
654  ASSERT(operand->HasFixedPolicy());
655  return operand;
656 }
657 
658 
659 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
660  LUnallocated* operand = ToUnallocated(reg);
661  ASSERT(operand->HasFixedPolicy());
662  return operand;
663 }
664 
665 
666 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
667  return new(zone()) LLabel(instr->block());
668 }
669 
670 
671 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
672  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
673 }
674 
675 
676 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
677  UNREACHABLE();
678  return NULL;
679 }
680 
681 
682 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
683  return AssignEnvironment(new(zone()) LDeoptimize);
684 }
685 
686 
687 LInstruction* LChunkBuilder::DoShift(Token::Value op,
688  HBitwiseBinaryOperation* instr) {
689  if (instr->representation().IsSmiOrInteger32()) {
690  ASSERT(instr->left()->representation().Equals(instr->representation()));
691  ASSERT(instr->right()->representation().Equals(instr->representation()));
692  LOperand* left = UseRegisterAtStart(instr->left());
693 
694  HValue* right_value = instr->right();
695  LOperand* right = NULL;
696  int constant_value = 0;
697  bool does_deopt = false;
698  if (right_value->IsConstant()) {
699  HConstant* constant = HConstant::cast(right_value);
700  right = chunk_->DefineConstantOperand(constant);
701  constant_value = constant->Integer32Value() & 0x1f;
702  // Left shifts can deoptimize if we shift by > 0 and the result cannot be
703  // truncated to smi.
704  if (instr->representation().IsSmi() && constant_value > 0) {
705  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
706  }
707  } else {
708  right = UseRegisterAtStart(right_value);
709  }
710 
711  // Shift operations can only deoptimize if we do a logical shift
712  // by 0 and the result cannot be truncated to int32.
713  if (op == Token::SHR && constant_value == 0) {
714  if (FLAG_opt_safe_uint32_operations) {
715  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
716  } else {
717  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
718  }
719  }
720 
721  LInstruction* result =
722  DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
723  return does_deopt ? AssignEnvironment(result) : result;
724  } else {
725  return DoArithmeticT(op, instr);
726  }
727 }
728 
729 
730 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
731  HArithmeticBinaryOperation* instr) {
732  ASSERT(instr->representation().IsDouble());
733  ASSERT(instr->left()->representation().IsDouble());
734  ASSERT(instr->right()->representation().IsDouble());
735  if (op == Token::MOD) {
736  LOperand* left = UseFixedDouble(instr->left(), d0);
737  LOperand* right = UseFixedDouble(instr->right(), d1);
738  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
739  return MarkAsCall(DefineFixedDouble(result, d0), instr);
740  } else {
741  LOperand* left = UseRegisterAtStart(instr->left());
742  LOperand* right = UseRegisterAtStart(instr->right());
743  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
744  return DefineAsRegister(result);
745  }
746 }
747 
748 
749 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
750  HBinaryOperation* instr) {
751  HValue* left = instr->left();
752  HValue* right = instr->right();
753  ASSERT(left->representation().IsTagged());
754  ASSERT(right->representation().IsTagged());
755  LOperand* context = UseFixed(instr->context(), cp);
756  LOperand* left_operand = UseFixed(left, r1);
757  LOperand* right_operand = UseFixed(right, r0);
758  LArithmeticT* result =
759  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
760  return MarkAsCall(DefineFixed(result, r0), instr);
761 }
762 
763 
764 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
765  ASSERT(is_building());
766  current_block_ = block;
767  next_block_ = next_block;
768  if (block->IsStartBlock()) {
769  block->UpdateEnvironment(graph_->start_environment());
770  argument_count_ = 0;
771  } else if (block->predecessors()->length() == 1) {
772  // We have a single predecessor => copy environment and outgoing
773  // argument count from the predecessor.
774  ASSERT(block->phis()->length() == 0);
775  HBasicBlock* pred = block->predecessors()->at(0);
776  HEnvironment* last_environment = pred->last_environment();
777  ASSERT(last_environment != NULL);
778  // Only copy the environment, if it is later used again.
779  if (pred->end()->SecondSuccessor() == NULL) {
780  ASSERT(pred->end()->FirstSuccessor() == block);
781  } else {
782  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
783  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
784  last_environment = last_environment->Copy();
785  }
786  }
787  block->UpdateEnvironment(last_environment);
788  ASSERT(pred->argument_count() >= 0);
789  argument_count_ = pred->argument_count();
790  } else {
791  // We are at a state join => process phis.
792  HBasicBlock* pred = block->predecessors()->at(0);
793  // No need to copy the environment, it cannot be used later.
794  HEnvironment* last_environment = pred->last_environment();
795  for (int i = 0; i < block->phis()->length(); ++i) {
796  HPhi* phi = block->phis()->at(i);
797  if (phi->HasMergedIndex()) {
798  last_environment->SetValueAt(phi->merged_index(), phi);
799  }
800  }
801  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
802  if (block->deleted_phis()->at(i) < last_environment->length()) {
803  last_environment->SetValueAt(block->deleted_phis()->at(i),
804  graph_->GetConstantUndefined());
805  }
806  }
807  block->UpdateEnvironment(last_environment);
808  // Pick up the outgoing argument count of one of the predecessors.
809  argument_count_ = pred->argument_count();
810  }
811  HInstruction* current = block->first();
812  int start = chunk_->instructions()->length();
813  while (current != NULL && !is_aborted()) {
814  // Code for constants in registers is generated lazily.
815  if (!current->EmitAtUses()) {
816  VisitInstruction(current);
817  }
818  current = current->next();
819  }
820  int end = chunk_->instructions()->length() - 1;
821  if (end >= start) {
822  block->set_first_instruction_index(start);
823  block->set_last_instruction_index(end);
824  }
825  block->set_argument_count(argument_count_);
826  next_block_ = NULL;
827  current_block_ = NULL;
828 }
829 
830 
831 void LChunkBuilder::VisitInstruction(HInstruction* current) {
832  HInstruction* old_current = current_instruction_;
833  current_instruction_ = current;
834 
835  LInstruction* instr = NULL;
836  if (current->CanReplaceWithDummyUses()) {
837  if (current->OperandCount() == 0) {
838  instr = DefineAsRegister(new(zone()) LDummy());
839  } else {
840  ASSERT(!current->OperandAt(0)->IsControlInstruction());
841  instr = DefineAsRegister(new(zone())
842  LDummyUse(UseAny(current->OperandAt(0))));
843  }
844  for (int i = 1; i < current->OperandCount(); ++i) {
845  if (current->OperandAt(i)->IsControlInstruction()) continue;
846  LInstruction* dummy =
847  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
848  dummy->set_hydrogen_value(current);
849  chunk_->AddInstruction(dummy, current_block_);
850  }
851  } else {
852  instr = current->CompileToLithium(this);
853  }
854 
855  argument_count_ += current->argument_delta();
856  ASSERT(argument_count_ >= 0);
857 
858  if (instr != NULL) {
859  // Associate the hydrogen instruction first, since we may need it for
860  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
861  instr->set_hydrogen_value(current);
862 
863 #if DEBUG
864  // Make sure that the lithium instruction has either no fixed register
865  // constraints in temps or the result OR no uses that are only used at
866  // start. If this invariant doesn't hold, the register allocator can decide
867  // to insert a split of a range immediately before the instruction due to an
868  // already allocated register needing to be used for the instruction's fixed
869  // register constraint. In this case, The register allocator won't see an
870  // interference between the split child and the use-at-start (it would if
871  // the it was just a plain use), so it is free to move the split child into
872  // the same register that is used for the use-at-start.
873  // See https://code.google.com/p/chromium/issues/detail?id=201590
874  if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
875  int fixed = 0;
876  int used_at_start = 0;
877  for (UseIterator it(instr); !it.Done(); it.Advance()) {
878  LUnallocated* operand = LUnallocated::cast(it.Current());
879  if (operand->IsUsedAtStart()) ++used_at_start;
880  }
881  if (instr->Output() != NULL) {
882  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
883  }
884  for (TempIterator it(instr); !it.Done(); it.Advance()) {
885  LUnallocated* operand = LUnallocated::cast(it.Current());
886  if (operand->HasFixedPolicy()) ++fixed;
887  }
888  ASSERT(fixed == 0 || used_at_start == 0);
889  }
890 #endif
891 
892  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
893  instr = AssignPointerMap(instr);
894  }
895  if (FLAG_stress_environments && !instr->HasEnvironment()) {
896  instr = AssignEnvironment(instr);
897  }
898  chunk_->AddInstruction(instr, current_block_);
899 
900  if (instr->IsCall()) {
901  HValue* hydrogen_value_for_lazy_bailout = current;
902  LInstruction* instruction_needing_environment = NULL;
903  if (current->HasObservableSideEffects()) {
904  HSimulate* sim = HSimulate::cast(current->next());
905  instruction_needing_environment = instr;
906  sim->ReplayEnvironment(current_block_->last_environment());
907  hydrogen_value_for_lazy_bailout = sim;
908  }
909  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
910  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
911  chunk_->AddInstruction(bailout, current_block_);
912  if (instruction_needing_environment != NULL) {
913  // Store the lazy deopt environment with the instruction if needed.
914  // Right now it is only used for LInstanceOfKnownGlobal.
915  instruction_needing_environment->
916  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
917  }
918  }
919  }
920  current_instruction_ = old_current;
921 }
922 
923 
924 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
925  return new(zone()) LGoto(instr->FirstSuccessor());
926 }
927 
928 
929 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
930  LInstruction* goto_instr = CheckElideControlInstruction(instr);
931  if (goto_instr != NULL) return goto_instr;
932 
933  HValue* value = instr->value();
934  LBranch* result = new(zone()) LBranch(UseRegister(value));
935  // Tagged values that are not known smis or booleans require a
936  // deoptimization environment. If the instruction is generic no
937  // environment is needed since all cases are handled.
938  Representation rep = value->representation();
939  HType type = value->type();
940  ToBooleanStub::Types expected = instr->expected_input_types();
941  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean() &&
942  !expected.IsGeneric()) {
943  return AssignEnvironment(result);
944  }
945  return result;
946 }
947 
948 
949 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
950  return new(zone()) LDebugBreak();
951 }
952 
953 
954 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
955  LInstruction* goto_instr = CheckElideControlInstruction(instr);
956  if (goto_instr != NULL) return goto_instr;
957 
958  ASSERT(instr->value()->representation().IsTagged());
959  LOperand* value = UseRegisterAtStart(instr->value());
960  LOperand* temp = TempRegister();
961  return new(zone()) LCmpMapAndBranch(value, temp);
962 }
963 
964 
965 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
966  info()->MarkAsRequiresFrame();
967  LOperand* value = UseRegister(instr->value());
968  return DefineAsRegister(new(zone()) LArgumentsLength(value));
969 }
970 
971 
972 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
973  info()->MarkAsRequiresFrame();
974  return DefineAsRegister(new(zone()) LArgumentsElements);
975 }
976 
977 
// 'instanceof' is a stub call: context in cp, operands fixed to r0/r1,
// result returned in r0.
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LInstanceOf* result =
      new(zone()) LInstanceOf(context, UseFixed(instr->left(), r0),
                              UseFixed(instr->right(), r1));
  return MarkAsCall(DefineFixed(result, r0), instr);
}
985 
986 
// 'instanceof' against a known global function: a specialized stub call
// with the object fixed to r0 and a fixed temp in r4.
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
    HInstanceOfKnownGlobal* instr) {
  LInstanceOfKnownGlobal* result =
      new(zone()) LInstanceOfKnownGlobal(
          UseFixed(instr->context(), cp),
          UseFixed(instr->left(), r0),
          FixedTemp(r4));
  return MarkAsCall(DefineFixed(result, r0), instr);
}
996 
997 
998 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
999  LOperand* receiver = UseRegisterAtStart(instr->receiver());
1000  LOperand* function = UseRegisterAtStart(instr->function());
1001  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1002  return AssignEnvironment(DefineAsRegister(result));
1003 }
1004 
1005 
1006 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1007  LOperand* function = UseFixed(instr->function(), r1);
1008  LOperand* receiver = UseFixed(instr->receiver(), r0);
1009  LOperand* length = UseFixed(instr->length(), r2);
1010  LOperand* elements = UseFixed(instr->elements(), r3);
1011  LApplyArguments* result = new(zone()) LApplyArguments(function,
1012  receiver,
1013  length,
1014  elements);
1015  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1016 }
1017 
1018 
1019 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1020  LOperand* argument = Use(instr->argument());
1021  return new(zone()) LPushArgument(argument);
1022 }
1023 
1024 
1025 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1026  HStoreCodeEntry* store_code_entry) {
1027  LOperand* function = UseRegister(store_code_entry->function());
1028  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1029  return new(zone()) LStoreCodeEntry(function, code_object);
1030 }
1031 
1032 
1033 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1034  HInnerAllocatedObject* instr) {
1035  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1036  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1037  return DefineAsRegister(
1038  new(zone()) LInnerAllocatedObject(base_object, offset));
1039 }
1040 
1041 
1042 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1043  return instr->HasNoUses()
1044  ? NULL
1045  : DefineAsRegister(new(zone()) LThisFunction);
1046 }
1047 
1048 
1049 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1050  if (instr->HasNoUses()) return NULL;
1051 
1052  if (info()->IsStub()) {
1053  return DefineFixed(new(zone()) LContext, cp);
1054  }
1055 
1056  return DefineAsRegister(new(zone()) LContext);
1057 }
1058 
1059 
1060 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1061  LOperand* context = UseFixed(instr->context(), cp);
1062  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1063 }
1064 
1065 
1066 LInstruction* LChunkBuilder::DoCallJSFunction(
1067  HCallJSFunction* instr) {
1068  LOperand* function = UseFixed(instr->function(), r1);
1069 
1070  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1071 
1072  return MarkAsCall(DefineFixed(result, r0), instr);
1073 }
1074 
1075 
// Call through a CallInterfaceDescriptor: operand 0 is the code target,
// each remaining operand is pinned to the descriptor's corresponding
// parameter register.
LInstruction* LChunkBuilder::DoCallWithDescriptor(
    HCallWithDescriptor* instr) {
  const CallInterfaceDescriptor* descriptor = instr->descriptor();

  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
  ops.Add(target, zone());
  for (int i = 1; i < instr->OperandCount(); i++) {
    // Hydrogen operand i corresponds to descriptor parameter i - 1
    // (operand 0 is the target itself).
    LOperand* op = UseFixed(instr->OperandAt(i),
                            descriptor->GetParameterRegister(i - 1));
    ops.Add(op, zone());
  }

  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
      descriptor, ops, zone());
  return MarkAsCall(DefineFixed(result, r0), instr);
}
1093 
1094 
1095 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1096  LOperand* context = UseFixed(instr->context(), cp);
1097  LOperand* function = UseFixed(instr->function(), r1);
1098  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1099  return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1100 }
1101 
1102 
// Dispatches a unary math operation to its specialized lowering routine.
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathFloor: return DoMathFloor(instr);
    case kMathRound: return DoMathRound(instr);
    case kMathAbs: return DoMathAbs(instr);
    case kMathLog: return DoMathLog(instr);
    case kMathExp: return DoMathExp(instr);
    case kMathSqrt: return DoMathSqrt(instr);
    case kMathPowHalf: return DoMathPowHalf(instr);
    case kMathClz32: return DoMathClz32(instr);
    default:
      // All supported unary math ops are listed above.
      UNREACHABLE();
      return NULL;
  }
}
1118 
1119 
1120 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1121  LOperand* input = UseRegister(instr->value());
1122  LMathFloor* result = new(zone()) LMathFloor(input);
1123  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1124 }
1125 
1126 
1127 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1128  LOperand* input = UseRegister(instr->value());
1129  LOperand* temp = FixedTemp(d3);
1130  LMathRound* result = new(zone()) LMathRound(input, temp);
1131  return AssignEnvironment(DefineAsRegister(result));
1132 }
1133 
1134 
// Math.abs: double and small-integer inputs need no context; other
// (tagged) inputs get the context in cp for the slow path.
LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
  Representation r = instr->value()->representation();
  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
      ? NULL
      : UseFixed(instr->context(), cp);
  LOperand* input = UseRegister(instr->value());
  LMathAbs* result = new(zone()) LMathAbs(context, input);
  // Can deoptimize and may contain a call, hence environment + pointer map.
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
1144 
1145 
// Math.log is lowered to a call: the double input and result are both
// fixed to d0.
LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->value()->representation().IsDouble());
  LOperand* input = UseFixedDouble(instr->value(), d0);
  return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), d0), instr);
}
1152 
1153 
1154 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1155  LOperand* input = UseRegisterAtStart(instr->value());
1156  LMathClz32* result = new(zone()) LMathClz32(input);
1157  return DefineAsRegister(result);
1158 }
1159 
1160 
// Math.exp: expanded inline; needs two core scratch registers plus a
// fixed double scratch.
LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->value()->representation().IsDouble());
  LOperand* input = UseRegister(instr->value());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LOperand* double_temp = FixedTemp(d3);  // Chosen by fair dice roll.
  LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
  return DefineAsRegister(result);
}
1171 
1172 
1173 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1174  LOperand* input = UseRegisterAtStart(instr->value());
1175  LMathSqrt* result = new(zone()) LMathSqrt(input);
1176  return DefineAsRegister(result);
1177 }
1178 
1179 
1180 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1181  LOperand* input = UseRegisterAtStart(instr->value());
1182  LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1183  return DefineAsRegister(result);
1184 }
1185 
1186 
1187 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1188  LOperand* context = UseFixed(instr->context(), cp);
1189  LOperand* constructor = UseFixed(instr->constructor(), r1);
1190  LCallNew* result = new(zone()) LCallNew(context, constructor);
1191  return MarkAsCall(DefineFixed(result, r0), instr);
1192 }
1193 
1194 
1195 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1196  LOperand* context = UseFixed(instr->context(), cp);
1197  LOperand* constructor = UseFixed(instr->constructor(), r1);
1198  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1199  return MarkAsCall(DefineFixed(result, r0), instr);
1200 }
1201 
1202 
1203 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1204  LOperand* context = UseFixed(instr->context(), cp);
1205  LOperand* function = UseFixed(instr->function(), r1);
1206  LCallFunction* call = new(zone()) LCallFunction(context, function);
1207  return MarkAsCall(DefineFixed(call, r0), instr);
1208 }
1209 
1210 
1211 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1212  LOperand* context = UseFixed(instr->context(), cp);
1213  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), r0), instr);
1214 }
1215 
1216 
// Rotate right: shares the common shift lowering.
LInstruction* LChunkBuilder::DoRor(HRor* instr) {
  return DoShift(Token::ROR, instr);
}
1220 
1221 
// Logical shift right: shares the common shift lowering.
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}
1225 
1226 
// Arithmetic shift right: shares the common shift lowering.
LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}
1230 
1231 
// Shift left: shares the common shift lowering.
LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}
1235 
1236 
// Bitwise and/or/xor. Integer inputs (which must be truncating) are
// handled inline; anything else goes to the generic binary-op stub.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));

    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    return DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    return DoArithmeticT(instr->op(), instr);
  }
}
1250 
1251 
// Integer division by a power-of-2 constant. An environment is attached
// when the result could be -0 (negative divisor), when kMinInt / -1
// could overflow, or when a non-truncating use could observe a lost
// remainder.
LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
          dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
      (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
       divisor != 1 && divisor != -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1268 
1269 
// Integer division by an arbitrary constant. An environment is attached
// when the divisor is 0, when -0 is possible (negative divisor), or
// when a non-truncating use could observe a lost remainder.
LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
          dividend, divisor));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1285 
1286 
// General integer division with a register divisor. When the CPU lacks
// SUDIV (hardware integer divide), a fixed double temp (d4) is needed
// for the VFP fallback. Always carries an environment since the checks
// (divide-by-zero, overflow, -0, remainder) can deoptimize.
LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d4);
  LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
  return AssignEnvironment(DefineAsRegister(div));
}
1297 
1298 
1299 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1300  if (instr->representation().IsSmiOrInteger32()) {
1301  if (instr->RightIsPowerOf2()) {
1302  return DoDivByPowerOf2I(instr);
1303  } else if (instr->right()->IsConstant()) {
1304  return DoDivByConstI(instr);
1305  } else {
1306  return DoDivI(instr);
1307  }
1308  } else if (instr->representation().IsDouble()) {
1309  return DoArithmeticD(Token::DIV, instr);
1310  } else {
1311  return DoArithmeticT(Token::DIV, instr);
1312  }
1313 }
1314 
1315 
// Flooring division by a power-of-2 constant. An environment is needed
// when -0 is possible (negative divisor) or when kMinInt / -1 could
// overflow.
LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
          dividend, divisor));
  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
      (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1327 
1328 
// Flooring division by an arbitrary constant. A scratch register is
// needed only when the dividend's sign can oppose the divisor's (the
// truncated quotient then needs a correction step). Deopts on divisor 0
// or a possible -0 result.
LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LOperand* temp =
      ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
       (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
      NULL : TempRegister();
  LInstruction* result = DefineAsRegister(
      new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
  if (divisor == 0 ||
      (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1347 
1348 
1349 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1350  if (instr->RightIsPowerOf2()) {
1351  return DoFlooringDivByPowerOf2I(instr);
1352  } else if (instr->right()->IsConstant()) {
1353  return DoFlooringDivByConstI(instr);
1354  } else {
1355  return DoDivI(instr);
1356  }
1357 }
1358 
1359 
// Modulus by a power-of-2 constant, done with masking. The result is
// defined same-as-first; deopts only when -0 must be caught.
LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegisterAtStart(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
          dividend, divisor));
  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1373 
1374 
// Modulus by an arbitrary constant. Deopts when the divisor is 0 or a
// -0 result must be caught.
LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  int32_t divisor = instr->right()->GetInteger32Constant();
  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
          dividend, divisor));
  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1388 
1389 
// General modulus with a register divisor. Without SUDIV hardware
// divide, two fixed double temps (d10/d11) are needed for the VFP
// fallback. Deopts on possible divide-by-zero or -0 results.
LInstruction* LChunkBuilder::DoModI(HMod* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(instr->right());
  LOperand* temp = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d10);
  LOperand* temp2 = CpuFeatures::IsSupported(SUDIV) ? NULL : FixedTemp(d11);
  LInstruction* result = DefineAsRegister(new(zone()) LModI(
          dividend, divisor, temp, temp2));
  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
      instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1406 
1407 
1408 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1409  if (instr->representation().IsSmiOrInteger32()) {
1410  if (instr->RightIsPowerOf2()) {
1411  return DoModByPowerOf2I(instr);
1412  } else if (instr->right()->IsConstant()) {
1413  return DoModByConstI(instr);
1414  } else {
1415  return DoModI(instr);
1416  }
1417  } else if (instr->representation().IsDouble()) {
1418  return DoArithmeticD(Token::MOD, instr);
1419  } else {
1420  return DoArithmeticT(Token::MOD, instr);
1421  }
1422 }
1423 
1424 
// Multiplication. For integers, operand constraints depend on whether
// overflow or -0 checks are needed and whether the rhs is a small
// constant. A double multiply that feeds a single add/sub is folded
// into a multiply-add/multiply-sub by DoAdd/DoSub; returning NULL here
// means this instruction emits no code of its own.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    HValue* left = instr->BetterLeftOperand();
    HValue* right = instr->BetterRightOperand();
    LOperand* left_op;
    LOperand* right_op;
    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
    bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);

    if (right->IsConstant()) {
      HConstant* constant = HConstant::cast(right);
      int32_t constant_value = constant->Integer32Value();
      // Constants -1, 0 and 1 can be optimized if the result can overflow.
      // For other constants, it can be optimized only without overflow.
      if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
        left_op = UseRegisterAtStart(left);
        right_op = UseConstant(right);
      } else {
        // The -0 check reads the left operand after the multiply, so it
        // must not share a register with the result (no "AtStart").
        if (bailout_on_minus_zero) {
          left_op = UseRegister(left);
        } else {
          left_op = UseRegisterAtStart(left);
        }
        right_op = UseRegister(right);
      }
    } else {
      // Same constraint as above for the -0 check.
      if (bailout_on_minus_zero) {
        left_op = UseRegister(left);
      } else {
        left_op = UseRegisterAtStart(left);
      }
      right_op = UseRegister(right);
    }
    LMulI* mul = new(zone()) LMulI(left_op, right_op);
    if (can_overflow || bailout_on_minus_zero) {
      AssignEnvironment(mul);
    }
    return DefineAsRegister(mul);

  } else if (instr->representation().IsDouble()) {
    if (instr->UseCount() == 1 && (instr->uses().value()->IsAdd() ||
                                   instr->uses().value()->IsSub())) {
      HBinaryOperation* use = HBinaryOperation::cast(instr->uses().value());

      if (use->IsAdd() && instr == use->left()) {
        // This mul is the lhs of an add. The add and mul will be folded into a
        // multiply-add in DoAdd.
        return NULL;
      }
      if (instr == use->right() && use->IsAdd() && !use->left()->IsMul()) {
        // This mul is the rhs of an add, where the lhs is not another mul.
        // The add and mul will be folded into a multiply-add in DoAdd.
        return NULL;
      }
      if (instr == use->right() && use->IsSub()) {
        // This mul is the rhs of a sub. The sub and mul will be folded into a
        // multiply-sub in DoSub.
        return NULL;
      }
    }

    return DoArithmeticD(Token::MUL, instr);
  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}
1493 
1494 
// Subtraction. A constant lhs is turned into a reverse subtraction
// (DoRSub); a double sub whose rhs is a multiply becomes a fused
// multiply-sub.
LInstruction* LChunkBuilder::DoSub(HSub* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));

    if (instr->left()->IsConstant()) {
      // If lhs is constant, do reverse subtraction instead.
      return DoRSub(instr);
    }

    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LSubI* sub = new(zone()) LSubI(left, right);
    LInstruction* result = DefineAsRegister(sub);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    if (instr->right()->IsMul()) {
      return DoMultiplySub(instr->left(), HMul::cast(instr->right()));
    }

    return DoArithmeticD(Token::SUB, instr);
  } else {
    return DoArithmeticT(Token::SUB, instr);
  }
}
1523 
1524 
// Reverse subtraction (ARM RSB): used by DoSub when the lhs is a
// constant, computing rhs-operand - lhs-operand with swapped inputs.
LInstruction* LChunkBuilder::DoRSub(HSub* instr) {
  ASSERT(instr->representation().IsSmiOrInteger32());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));

  // Note: The lhs of the subtraction becomes the rhs of the
  // reverse-subtraction.
  LOperand* left = UseRegisterAtStart(instr->right());
  LOperand* right = UseOrConstantAtStart(instr->left());
  LRSubI* rsb = new(zone()) LRSubI(left, right);
  LInstruction* result = DefineAsRegister(rsb);
  if (instr->CheckFlag(HValue::kCanOverflow)) {
    result = AssignEnvironment(result);
  }
  return result;
}
1541 
1542 
// Emits a fused double multiply-add: addend + multiplier * multiplicand.
// The result shares the addend's register (the accumulate form requires
// it), hence DefineSameAsFirst on the addend operand.
LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
  LOperand* addend_op = UseRegisterAtStart(addend);
  return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
                                                     multiplicand_op));
}
1550 
1551 
// Emits a fused double multiply-sub: minuend - multiplier * multiplicand.
// The result shares the minuend's register, hence DefineSameAsFirst.
LInstruction* LChunkBuilder::DoMultiplySub(HValue* minuend, HMul* mul) {
  LOperand* minuend_op = UseRegisterAtStart(minuend);
  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());

  return DefineSameAsFirst(new(zone()) LMultiplySubD(minuend_op,
                                                     multiplier_op,
                                                     multiplicand_op));
}
1561 
1562 
// Addition. Integer and external-pointer adds are emitted inline (only
// the former can overflow); a double add with a multiply operand is
// folded into a fused multiply-add.
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
    LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsExternal()) {
    // External (raw pointer) + int32 offset; overflow is impossible here.
    ASSERT(instr->left()->representation().IsExternal());
    ASSERT(instr->right()->representation().IsInteger32());
    ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineAsRegister(add);
    return result;
  } else if (instr->representation().IsDouble()) {
    if (instr->left()->IsMul()) {
      return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());
    }

    if (instr->right()->IsMul()) {
      // DoMul only folds the rhs mul when the lhs is not itself a mul.
      ASSERT(!instr->left()->IsMul());
      return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
    }

    return DoArithmeticD(Token::ADD, instr);
  } else {
    return DoArithmeticT(Token::ADD, instr);
  }
}
1599 
1600 
// Math.min/Math.max for integer or double inputs; computed inline with
// no environment.
LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
  LOperand* left = NULL;
  LOperand* right = NULL;
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    left = UseRegisterAtStart(instr->BetterLeftOperand());
    right = UseOrConstantAtStart(instr->BetterRightOperand());
  } else {
    // Only integer and double representations reach this point.
    ASSERT(instr->representation().IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    left = UseRegisterAtStart(instr->left());
    right = UseRegisterAtStart(instr->right());
  }
  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
}
1618 
1619 
// Math.pow, lowered to a call with fixed registers: base in d0, the
// exponent in d1 (double) or r2 (tagged/int), result in d2.
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), d0);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), d1) :
      UseFixed(instr->right(), r2);
  LPower* result = new(zone()) LPower(left, right);
  return MarkAsCall(DefineFixedDouble(result, d2),
                    instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
1635 
1636 
// Generic (tagged) comparison via the compare stub: lhs in r1, rhs in
// r0, result in r0.
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), r1);
  LOperand* right = UseFixed(instr->right(), r0);
  LCmpT* result = new(zone()) LCmpT(context, left, right);
  return MarkAsCall(DefineFixed(result, r0), instr);
}
1646 
1647 
// Numeric compare-and-branch. Integer operands may be constants; double
// operands must both be in registers.
LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
    HCompareNumericAndBranch* instr) {
  Representation r = instr->representation();
  if (r.IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(r));
    ASSERT(instr->right()->representation().Equals(r));
    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
    LOperand* right = UseRegisterOrConstantAtStart(instr->right());
    return new(zone()) LCompareNumericAndBranch(left, right);
  } else {
    ASSERT(r.IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseRegisterAtStart(instr->right());
    return new(zone()) LCompareNumericAndBranch(left, right);
  }
}
1666 
1667 
1668 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1669  HCompareObjectEqAndBranch* instr) {
1670  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1671  if (goto_instr != NULL) return goto_instr;
1672  LOperand* left = UseRegisterAtStart(instr->left());
1673  LOperand* right = UseRegisterAtStart(instr->right());
1674  return new(zone()) LCmpObjectEqAndBranch(left, right);
1675 }
1676 
1677 
1678 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1679  HCompareHoleAndBranch* instr) {
1680  LOperand* value = UseRegisterAtStart(instr->value());
1681  return new(zone()) LCmpHoleAndBranch(value);
1682 }
1683 
1684 
1685 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1686  HCompareMinusZeroAndBranch* instr) {
1687  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1688  if (goto_instr != NULL) return goto_instr;
1689  LOperand* value = UseRegister(instr->value());
1690  LOperand* scratch = TempRegister();
1691  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1692 }
1693 
1694 
1695 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1696  ASSERT(instr->value()->representation().IsTagged());
1697  LOperand* value = UseRegisterAtStart(instr->value());
1698  LOperand* temp = TempRegister();
1699  return new(zone()) LIsObjectAndBranch(value, temp);
1700 }
1701 
1702 
1703 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1704  ASSERT(instr->value()->representation().IsTagged());
1705  LOperand* value = UseRegisterAtStart(instr->value());
1706  LOperand* temp = TempRegister();
1707  return new(zone()) LIsStringAndBranch(value, temp);
1708 }
1709 
1710 
1711 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1712  ASSERT(instr->value()->representation().IsTagged());
1713  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1714 }
1715 
1716 
1717 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1718  HIsUndetectableAndBranch* instr) {
1719  ASSERT(instr->value()->representation().IsTagged());
1720  LOperand* value = UseRegisterAtStart(instr->value());
1721  return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1722 }
1723 
1724 
// String comparison branch via the compare stub: lhs in r1, rhs in r0.
LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), r1);
  LOperand* right = UseFixed(instr->right(), r0);
  LStringCompareAndBranch* result =
      new(zone()) LStringCompareAndBranch(context, left, right);
  return MarkAsCall(result, instr);
}
1736 
1737 
1738 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1739  HHasInstanceTypeAndBranch* instr) {
1740  ASSERT(instr->value()->representation().IsTagged());
1741  LOperand* value = UseRegisterAtStart(instr->value());
1742  return new(zone()) LHasInstanceTypeAndBranch(value);
1743 }
1744 
1745 
1746 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1747  HGetCachedArrayIndex* instr) {
1748  ASSERT(instr->value()->representation().IsTagged());
1749  LOperand* value = UseRegisterAtStart(instr->value());
1750 
1751  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1752 }
1753 
1754 
1755 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1756  HHasCachedArrayIndexAndBranch* instr) {
1757  ASSERT(instr->value()->representation().IsTagged());
1758  return new(zone()) LHasCachedArrayIndexAndBranch(
1759  UseRegisterAtStart(instr->value()));
1760 }
1761 
1762 
1763 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1764  HClassOfTestAndBranch* instr) {
1765  ASSERT(instr->value()->representation().IsTagged());
1766  LOperand* value = UseRegister(instr->value());
1767  return new(zone()) LClassOfTestAndBranch(value, TempRegister());
1768 }
1769 
1770 
1771 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1772  LOperand* map = UseRegisterAtStart(instr->value());
1773  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1774 }
1775 
1776 
// Reads a field of a JSDate. The date object is fixed to r0 with a
// fixed temp in r1 (used for the runtime call on the slow path); the
// call may deoptimize eagerly.
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
  LOperand* object = UseFixed(instr->value(), r0);
  LDateField* result =
      new(zone()) LDateField(object, FixedTemp(r1), instr->index());
  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
1783 
1784 
1785 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1786  LOperand* string = UseRegisterAtStart(instr->string());
1787  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1788  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1789 }
1790 
1791 
// Stores one character into a sequential string. Under --debug-code the
// index must be a register and the context is needed (for the extra
// verification path); otherwise a constant index suffices and no
// context is used.
LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
  LOperand* string = UseRegisterAtStart(instr->string());
  LOperand* index = FLAG_debug_code
      ? UseRegisterAtStart(instr->index())
      : UseRegisterOrConstantAtStart(instr->index());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
  return new(zone()) LSeqStringSetChar(context, string, index, value);
}
1801 
1802 
1803 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1804  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1805  LOperand* length = UseRegister(instr->length());
1806  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1807 }
1808 
1809 
LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
    HBoundsCheckBaseIndexInformation* instr) {
  // This hydrogen instruction only carries analysis data and must never
  // survive to Lithium instruction selection.
  UNREACHABLE();
  return NULL;
}
1815 
1816 
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  // No Lithium instruction is emitted.
  return NULL;
}
1822 
1823 
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  // HUseConst only keeps a constant alive in the hydrogen graph; no code
  // is generated for it.
  return NULL;
}
1827 
1828 
LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}
1835 
1836 
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  // Lower a representation change.  Each (from, to) pair selects the
  // Lithium conversion instruction; conversions that can fail get an
  // environment, and those that can allocate get a pointer map.
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsSmi()) {
    if (to.IsTagged()) {
      // A smi already is a valid tagged value; emit only a dummy use.
      LOperand* value = UseRegister(instr->value());
      return DefineSameAsFirst(new(zone()) LDummyUse(value));
    }
    // Otherwise handle the smi like any other tagged value below.
    from = Representation::Tagged();
  }
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      // Untagging can deoptimize, hence the environment.
      LOperand* value = UseRegister(instr->value());
      LNumberUntagD* res = new(zone()) LNumberUntagD(value);
      return AssignEnvironment(DefineAsRegister(res));
    } else if (to.IsSmi()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      if (val->type().IsSmi()) {
        // Statically known smi: nothing to check.
        return DefineSameAsFirst(new(zone()) LDummyUse(value));
      }
      return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = NULL;
      LInstruction* res = NULL;
      HValue* val = instr->value();
      if (val->type().IsSmi() || val->representation().IsSmi()) {
        // Known smi: untagging cannot fail, no environment needed.
        value = UseRegisterAtStart(val);
        res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
      } else {
        // General tagged value: needs a scratch register plus the fixed
        // double register d11, and can deoptimize.
        value = UseRegister(val);
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = FixedTemp(d11);
        res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
                                                       temp1,
                                                       temp2));
        res = AssignEnvironment(res);
      }
      return res;
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      // Boxing a double can hit a deferred runtime call, hence the
      // deferred-calling mark and the pointer map.
      info()->MarkAsDeferredCalling();
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();

      // Make sure that the temp and result_temp registers are
      // different.
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
      Define(result, result_temp);
      return AssignPointerMap(result);
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(instr->value());
      return AssignEnvironment(
          DefineAsRegister(new(zone()) LDoubleToSmi(value)));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      LDoubleToI* res = new(zone()) LDoubleToI(value);
      return AssignEnvironment(DefineAsRegister(res));
    }
  } else if (from.IsInteger32()) {
    info()->MarkAsDeferredCalling();
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegisterAtStart(val);
      if (!instr->CheckFlag(HValue::kCanOverflow)) {
        // The value always fits in a smi: plain tagging, cannot fail.
        return DefineAsRegister(new(zone()) LSmiTag(value));
      } else if (val->CheckFlag(HInstruction::kUint32)) {
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      } else {
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      }
    } else if (to.IsSmi()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
      if (instr->CheckFlag(HValue::kCanOverflow)) {
        // Tagging can overflow, so the instruction may deoptimize.
        result = AssignEnvironment(result);
      }
      return result;
    } else {
      ASSERT(to.IsDouble());
      if (instr->value()->CheckFlag(HInstruction::kUint32)) {
        return DefineAsRegister(
            new(zone()) LUint32ToDouble(UseRegister(instr->value())));
      } else {
        return DefineAsRegister(
            new(zone()) LInteger32ToDouble(Use(instr->value())));
      }
    }
  }
  // Every legal (from, to) combination is handled above.
  UNREACHABLE();
  return NULL;
}
1941 
1942 
1943 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1944  LOperand* value = UseRegisterAtStart(instr->value());
1945  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1946 }
1947 
1948 
1949 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1950  LOperand* value = UseRegisterAtStart(instr->value());
1951  return AssignEnvironment(new(zone()) LCheckSmi(value));
1952 }
1953 
1954 
1955 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1956  LOperand* value = UseRegisterAtStart(instr->value());
1957  LInstruction* result = new(zone()) LCheckInstanceType(value);
1958  return AssignEnvironment(result);
1959 }
1960 
1961 
1962 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1963  LOperand* value = UseRegisterAtStart(instr->value());
1964  return AssignEnvironment(new(zone()) LCheckValue(value));
1965 }
1966 
1967 
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  // Map check.  When the check can be omitted the operand stays NULL and
  // the instruction is emitted without environment or pointer map.
  LOperand* value = NULL;
  if (!instr->CanOmitMapChecks()) {
    value = UseRegisterAtStart(instr->value());
    // A migration target means deferred code may call into the runtime.
    if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
  }
  LCheckMaps* result = new(zone()) LCheckMaps(value);
  if (!instr->CanOmitMapChecks()) {
    // The check can deoptimize; with a migration target it also needs a
    // pointer map for the deferred call.
    AssignEnvironment(result);
    if (instr->has_migration_target()) return AssignPointerMap(result);
  }
  return result;
}
1981 
1982 
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  // Clamp a value to the uint8 range; the instruction chosen depends on
  // the input representation.
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    return DefineAsRegister(new(zone()) LClampDToUint8(reg));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new(zone()) LClampIToUint8(reg));
  } else {
    ASSERT(input_rep.IsSmiOrTagged());
    // Register allocator doesn't (yet) support allocation of double
    // temps. Reserve d11 explicitly.
    LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(d11));
    // The tagged path can deoptimize, hence the environment.
    return AssignEnvironment(DefineAsRegister(result));
  }
}
1999 
2000 
2001 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2002  HValue* value = instr->value();
2003  ASSERT(value->representation().IsDouble());
2004  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2005 }
2006 
2007 
2008 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2009  LOperand* lo = UseRegister(instr->lo());
2010  LOperand* hi = UseRegister(instr->hi());
2011  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2012 }
2013 
2014 
2015 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2016  LOperand* context = info()->IsStub()
2017  ? UseFixed(instr->context(), cp)
2018  : NULL;
2019  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2020  return new(zone()) LReturn(UseFixed(instr->value(), r0), context,
2021  parameter_count);
2022 }
2023 
2024 
2025 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2026  Representation r = instr->representation();
2027  if (r.IsSmi()) {
2028  return DefineAsRegister(new(zone()) LConstantS);
2029  } else if (r.IsInteger32()) {
2030  return DefineAsRegister(new(zone()) LConstantI);
2031  } else if (r.IsDouble()) {
2032  return DefineAsRegister(new(zone()) LConstantD);
2033  } else if (r.IsExternal()) {
2034  return DefineAsRegister(new(zone()) LConstantE);
2035  } else if (r.IsTagged()) {
2036  return DefineAsRegister(new(zone()) LConstantT);
2037  } else {
2038  UNREACHABLE();
2039  return NULL;
2040  }
2041 }
2042 
2043 
2044 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2045  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2046  return instr->RequiresHoleCheck()
2047  ? AssignEnvironment(DefineAsRegister(result))
2048  : DefineAsRegister(result);
2049 }
2050 
2051 
2052 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2053  LOperand* context = UseFixed(instr->context(), cp);
2054  LOperand* global_object = UseFixed(instr->global_object(), r0);
2055  LLoadGlobalGeneric* result =
2056  new(zone()) LLoadGlobalGeneric(context, global_object);
2057  return MarkAsCall(DefineFixed(result, r0), instr);
2058 }
2059 
2060 
2061 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2062  LOperand* value = UseRegister(instr->value());
2063  // Use a temp to check the value in the cell in the case where we perform
2064  // a hole check.
2065  return instr->RequiresHoleCheck()
2066  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2067  : new(zone()) LStoreGlobalCell(value, NULL);
2068 }
2069 
2070 
2071 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2072  LOperand* context = UseRegisterAtStart(instr->value());
2073  LInstruction* result =
2074  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2075  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2076 }
2077 
2078 
2079 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2080  LOperand* context;
2081  LOperand* value;
2082  if (instr->NeedsWriteBarrier()) {
2083  context = UseTempRegister(instr->context());
2084  value = UseTempRegister(instr->value());
2085  } else {
2086  context = UseRegister(instr->context());
2087  value = UseRegister(instr->value());
2088  }
2089  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
2090  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2091 }
2092 
2093 
2094 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2095  LOperand* obj = UseRegisterAtStart(instr->object());
2096  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2097 }
2098 
2099 
2100 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2101  LOperand* context = UseFixed(instr->context(), cp);
2102  LOperand* object = UseFixed(instr->object(), r0);
2103  LInstruction* result =
2104  DefineFixed(new(zone()) LLoadNamedGeneric(context, object), r0);
2105  return MarkAsCall(result, instr);
2106 }
2107 
2108 
2109 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2110  HLoadFunctionPrototype* instr) {
2111  return AssignEnvironment(DefineAsRegister(
2112  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2113 }
2114 
2115 
2116 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2117  return DefineAsRegister(new(zone()) LLoadRoot);
2118 }
2119 
2120 
LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
  // Keyed load from either an ordinary elements backing store or a
  // typed/external array; the use positions differ per case.
  ASSERT(instr->key()->representation().IsSmiOrInteger32());
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LLoadKeyed* result = NULL;

  if (!instr->is_typed_elements()) {
    LOperand* obj = NULL;
    if (instr->representation().IsDouble()) {
      obj = UseRegister(instr->elements());
    } else {
      ASSERT(instr->representation().IsSmiOrTagged());
      obj = UseRegisterAtStart(instr->elements());
    }
    result = new(zone()) LLoadKeyed(obj, key);
  } else {
    // Typed/external arrays: representation must match the elements kind.
    ASSERT(
        (instr->representation().IsInteger32() &&
         !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
        (instr->representation().IsDouble() &&
         IsDoubleOrFloatElementsKind(instr->elements_kind())));
    LOperand* backing_store = UseRegister(instr->elements());
    result = new(zone()) LLoadKeyed(backing_store, key);
  }

  DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure it
  // has an environment.
  bool can_deoptimize = instr->RequiresHoleCheck() ||
      elements_kind == EXTERNAL_UINT32_ELEMENTS ||
      elements_kind == UINT32_ELEMENTS;
  return can_deoptimize ? AssignEnvironment(result) : result;
}
2154 
2155 
2156 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2157  LOperand* context = UseFixed(instr->context(), cp);
2158  LOperand* object = UseFixed(instr->object(), r1);
2159  LOperand* key = UseFixed(instr->key(), r0);
2160 
2161  LInstruction* result =
2162  DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), r0);
2163  return MarkAsCall(result, instr);
2164 }
2165 
2166 
LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
  // Keyed store into an ordinary backing store or a typed/external array.
  if (!instr->is_typed_elements()) {
    ASSERT(instr->elements()->representation().IsTagged());
    bool needs_write_barrier = instr->NeedsWriteBarrier();
    LOperand* object = NULL;
    LOperand* key = NULL;
    LOperand* val = NULL;

    if (instr->value()->representation().IsDouble()) {
      object = UseRegisterAtStart(instr->elements());
      val = UseRegister(instr->value());
      key = UseRegisterOrConstantAtStart(instr->key());
    } else {
      ASSERT(instr->value()->representation().IsSmiOrTagged());
      if (needs_write_barrier) {
        // With a write barrier all operands are forced into temp registers.
        object = UseTempRegister(instr->elements());
        val = UseTempRegister(instr->value());
        key = UseTempRegister(instr->key());
      } else {
        object = UseRegisterAtStart(instr->elements());
        val = UseRegisterAtStart(instr->value());
        key = UseRegisterOrConstantAtStart(instr->key());
      }
    }

    return new(zone()) LStoreKeyed(object, key, val);
  }

  // Typed/external array path: the value representation must match the
  // elements kind, and the elements operand's representation must match
  // the array flavour.
  ASSERT(
      (instr->value()->representation().IsInteger32() &&
       !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
      (instr->value()->representation().IsDouble() &&
       IsDoubleOrFloatElementsKind(instr->elements_kind())));
  ASSERT((instr->is_fixed_typed_array() &&
          instr->elements()->representation().IsTagged()) ||
         (instr->is_external() &&
          instr->elements()->representation().IsExternal()));
  LOperand* val = UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LOperand* backing_store = UseRegister(instr->elements());
  return new(zone()) LStoreKeyed(backing_store, key, val);
}
2209 
2210 
2211 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2212  LOperand* context = UseFixed(instr->context(), cp);
2213  LOperand* obj = UseFixed(instr->object(), r2);
2214  LOperand* key = UseFixed(instr->key(), r1);
2215  LOperand* val = UseFixed(instr->value(), r0);
2216 
2217  ASSERT(instr->object()->representation().IsTagged());
2218  ASSERT(instr->key()->representation().IsTagged());
2219  ASSERT(instr->value()->representation().IsTagged());
2220 
2221  return MarkAsCall(
2222  new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
2223 }
2224 
2225 
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  LOperand* object = UseRegister(instr->object());
  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
    // Simple transition: only a scratch register for the new map is
    // needed; no context and no pointer map.
    LOperand* new_map_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
    return result;
  } else {
    // Non-simple transition: needs the context in cp and a pointer map
    // (the instruction can reach a GC point).
    LOperand* context = UseFixed(instr->context(), cp);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, context, NULL);
    return AssignPointerMap(result);
  }
}
2241 
2242 
2243 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2244  HTrapAllocationMemento* instr) {
2245  LOperand* object = UseRegister(instr->object());
2246  LOperand* temp = TempRegister();
2247  LTrapAllocationMemento* result =
2248  new(zone()) LTrapAllocationMemento(object, temp);
2249  return AssignEnvironment(result);
2250 }
2251 
2252 
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  // Store into a named field.  Use positions for the object and value
  // depend on which write barriers will be emitted and on the field's
  // representation.
  bool is_in_object = instr->access().IsInobject();
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  bool needs_write_barrier_for_map = instr->has_transition() &&
      instr->NeedsWriteBarrierForMap();

  LOperand* obj;
  if (needs_write_barrier) {
    obj = is_in_object
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  LOperand* val;
  if (needs_write_barrier || instr->field_representation().IsSmi()) {
    val = UseTempRegister(instr->value());
  } else if (instr->field_representation().IsDouble()) {
    val = UseRegisterAtStart(instr->value());
  } else {
    val = UseRegister(instr->value());
  }

  // We need a temporary register for write barrier of the map field.
  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;

  LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp);
  if (instr->field_representation().IsHeapObject()) {
    if (!instr->value()->type().IsHeapObject()) {
      // The value might be a smi, so the store can deoptimize.
      return AssignEnvironment(result);
    }
  }
  return result;
}
2290 
2291 
2292 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2293  LOperand* context = UseFixed(instr->context(), cp);
2294  LOperand* obj = UseFixed(instr->object(), r1);
2295  LOperand* val = UseFixed(instr->value(), r0);
2296 
2297  LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
2298  return MarkAsCall(result, instr);
2299 }
2300 
2301 
2302 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2303  LOperand* context = UseFixed(instr->context(), cp);
2304  LOperand* left = UseFixed(instr->left(), r1);
2305  LOperand* right = UseFixed(instr->right(), r0);
2306  return MarkAsCall(
2307  DefineFixed(new(zone()) LStringAdd(context, left, right), r0),
2308  instr);
2309 }
2310 
2311 
2312 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2313  LOperand* string = UseTempRegister(instr->string());
2314  LOperand* index = UseTempRegister(instr->index());
2315  LOperand* context = UseAny(instr->context());
2316  LStringCharCodeAt* result =
2317  new(zone()) LStringCharCodeAt(context, string, index);
2318  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2319 }
2320 
2321 
2322 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2323  LOperand* char_code = UseRegister(instr->value());
2324  LOperand* context = UseAny(instr->context());
2325  LStringCharFromCode* result =
2326  new(zone()) LStringCharFromCode(context, char_code);
2327  return AssignPointerMap(DefineAsRegister(result));
2328 }
2329 
2330 
2331 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2332  info()->MarkAsDeferredCalling();
2333  LOperand* context = UseAny(instr->context());
2334  LOperand* size = instr->size()->IsConstant()
2335  ? UseConstant(instr->size())
2336  : UseTempRegister(instr->size());
2337  LOperand* temp1 = TempRegister();
2338  LOperand* temp2 = TempRegister();
2339  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
2340  return AssignPointerMap(DefineAsRegister(result));
2341 }
2342 
2343 
2344 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2345  LOperand* context = UseFixed(instr->context(), cp);
2346  return MarkAsCall(
2347  DefineFixed(new(zone()) LRegExpLiteral(context), r0), instr);
2348 }
2349 
2350 
2351 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2352  LOperand* context = UseFixed(instr->context(), cp);
2353  return MarkAsCall(
2354  DefineFixed(new(zone()) LFunctionLiteral(context), r0), instr);
2355 }
2356 
2357 
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  // On-stack-replacement entry: no arguments may be pending, the register
  // allocator is told about the entry, and the current environment is
  // re-tagged with the OSR ast id before being attached.
  ASSERT(argument_count_ == 0);
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
2364 
2365 
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
  LParameter* result = new(zone()) LParameter;
  if (instr->kind() == HParameter::STACK_PARAMETER) {
    // Ordinary parameters live at a fixed stack slot in the caller frame.
    int spill_index = chunk()->GetParameterStackSlot(instr->index());
    return DefineAsSpilled(result, spill_index);
  } else {
    // Stub parameters arrive in the registers dictated by the stub's
    // interface descriptor.
    ASSERT(info()->IsStub());
    CodeStubInterfaceDescriptor* descriptor =
        info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
    int index = static_cast<int>(instr->index());
    Register reg = descriptor->GetParameterRegister(index);
    return DefineFixed(result, reg);
  }
}
2380 
2381 
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  // Use an index that corresponds to the location in the unoptimized frame,
  // which the optimized frame will subsume.
  int env_index = instr->index();
  int spill_index = 0;
  if (instr->environment()->is_parameter_index(env_index)) {
    spill_index = chunk()->GetParameterStackSlot(env_index);
  } else {
    spill_index = env_index - instr->environment()->first_local_index();
    if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
      // Too many locals to address as fixed spill slots: abort OSR
      // compilation.
      Abort(kTooManySpillSlotsNeededForOSR);
      spill_index = 0;
    }
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
2398 
2399 
2400 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2401  LOperand* context = UseFixed(instr->context(), cp);
2402  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), r0), instr);
2403 }
2404 
2405 
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used.  No Lithium instruction is emitted.
  return NULL;
}
2413 
2414 
LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
  // Record the captured object's state in the current environment so that
  // deoptimization can rematerialize it.
  instr->ReplayEnvironment(current_block_->last_environment());

  // There are no real uses of a captured object.
  return NULL;
}
2421 
2422 
2423 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2424  info()->MarkAsRequiresFrame();
2425  LOperand* args = UseRegister(instr->arguments());
2426  LOperand* length = UseRegisterOrConstantAtStart(instr->length());
2427  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2428  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2429 }
2430 
2431 
2432 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2433  LOperand* object = UseFixed(instr->value(), r0);
2434  LToFastProperties* result = new(zone()) LToFastProperties(object);
2435  return MarkAsCall(DefineFixed(result, r0), instr);
2436 }
2437 
2438 
2439 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2440  LOperand* context = UseFixed(instr->context(), cp);
2441  LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), r0));
2442  return MarkAsCall(DefineFixed(result, r0), instr);
2443 }
2444 
2445 
2446 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2447  LInstruction* goto_instr = CheckElideControlInstruction(instr);
2448  if (goto_instr != NULL) return goto_instr;
2449 
2450  return new(zone()) LTypeofIsAndBranch(UseRegister(instr->value()));
2451 }
2452 
2453 
2454 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2455  HIsConstructCallAndBranch* instr) {
2456  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2457 }
2458 
2459 
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  // Simulates only update the environment bookkeeping; they generate no
  // code of their own.
  instr->ReplayEnvironment(current_block_->last_environment());
  return NULL;
}
2464 
2465 
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
  if (instr->is_function_entry()) {
    // Function-entry stack check: a real call, so the context must be
    // fixed in cp.
    LOperand* context = UseFixed(instr->context(), cp);
    return MarkAsCall(new(zone()) LStackCheck(context), instr);
  } else {
    // Back-edge stack check: the context may live anywhere; the
    // instruction gets an environment and a pointer map instead of being
    // marked as a call.
    ASSERT(instr->is_backwards_branch());
    LOperand* context = UseAny(instr->context());
    return AssignEnvironment(
        AssignPointerMap(new(zone()) LStackCheck(context)));
  }
}
2477 
2478 
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  // Entering an inlined function: derive the inner environment from the
  // outer one and make it current.  No code is generated.
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->inlining_kind());
  // Only replay binding of arguments object if it wasn't removed from graph.
  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
    inner->Bind(instr->arguments_var(), instr->arguments_object());
  }
  inner->set_entry(instr);
  current_block_->UpdateEnvironment(inner);
  // Remember the closure so the chunk can report all inlined functions.
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
2496 
2497 
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  // Leaving an inlined function: drop any pushed arguments and restore
  // the outer environment.
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (env->entry()->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    ASSERT(instr->argument_delta() == -argument_count);
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}
2515 
2516 
2517 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2518  LOperand* context = UseFixed(instr->context(), cp);
2519  LOperand* object = UseFixed(instr->enumerable(), r0);
2520  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2521  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
2522 }
2523 
2524 
2525 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2526  LOperand* map = UseRegister(instr->map());
2527  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2528 }
2529 
2530 
2531 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2532  LOperand* value = UseRegisterAtStart(instr->value());
2533  LOperand* map = UseRegisterAtStart(instr->map());
2534  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2535 }
2536 
2537 
2538 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2539  LOperand* object = UseRegister(instr->object());
2540  LOperand* index = UseRegister(instr->index());
2541  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2542 }
2543 
2544 } } // namespace v8::internal
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter NULL
Definition: flags.cc:269
#define DEFINE_COMPILE(type)
Definition: lithium-arm.cc:38
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:156
const char * ToCString(const v8::String::Utf8Value &value)
void PrintDataTo(StringStream *stream) V8_OVERRIDE
const Register r3
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf map
Definition: flags.cc:350
virtual void PrintDataTo(StringStream *stream) V8_OVERRIDE
Definition: lithium-arm.cc:124
static String * cast(Object *obj)
const LowDwVfpRegister d11
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:99
const Register cp
const LowDwVfpRegister d0
int int32_t
Definition: unicode.cc:47
static bool IsSupported(CpuFeature f)
Definition: assembler-arm.h:68
LEnvironment * environment() const
Definition: lithium-arm.h:246
#define ASSERT(condition)
Definition: checks.h:329
virtual const char * Mnemonic() const =0
const LowDwVfpRegister d3
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:86
const Register r2
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:43
const LowDwVfpRegister d10
virtual LOperand * result() const =0
static const int kMaxFixedSlotIndex
Definition: lithium.h:195
uint32_t additional_index() const
void Add(Vector< const char > format, Vector< FmtElm > elms)
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:52
DwVfpRegister DoubleRegister
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object size
Definition: flags.cc:211
const LowDwVfpRegister d4
static const char * String(Value tok)
Definition: token.h:294
bool HasEnvironment() const
Definition: lithium-arm.h:247
static int ToAllocationIndex(Register reg)
const Register r0
uint32_t additional_index() const
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:67
LPointerMap * pointer_map() const
Definition: lithium-arm.h:250
const LowDwVfpRegister d2
const Register r1
const char * ElementsKindToString(ElementsKind kind)
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function info
Definition: flags.cc:317
void PrintDataTo(StringStream *stream) V8_OVERRIDE
static int ToAllocationIndex(DwVfpRegister reg)
bool IsDoubleOrFloatElementsKind(ElementsKind kind)
HeapObject * obj
bool HasPointerMap() const
Definition: lithium-arm.h:251
const LowDwVfpRegister d1
static Representation Tagged()
bool IsRedundant() const
Definition: lithium-arm.cc:113
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long expose natives in global object expose freeBuffer extension expose gc extension under the specified name expose externalize string extension number of stack frames to capture disable builtin natives files print name of functions for which code is generated use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations always try to OSR functions trace optimize function deoptimization minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions trace debugging JSON request response trace out of bounds accesses to external arrays trace_js_array_abuse automatically set the debug break flag when debugger commands are in the queue abort by crashing maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print statistics of the maximum memory committed for the heap in name
Definition: flags.cc:505
static HValue * cast(HValue *value)
void PrintTo(StringStream *stream)
Definition: lithium.cc:55
const Register r4