v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine.
lithium-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_X64
31 
32 #include "lithium-allocator-inl.h"
33 #include "x64/lithium-x64.h"
34 #include "x64/lithium-codegen-x64.h"
35 #include "hydrogen-osr.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 #define DEFINE_COMPILE(type) \
41  void L##type::CompileToNative(LCodeGen* generator) { \
42  generator->Do##type(this); \
43  }
44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
45 #undef DEFINE_COMPILE
46 
47 
48 #ifdef DEBUG
49 void LInstruction::VerifyCall() {
50  // Call instructions can use only fixed registers as temporaries and
51  // outputs because all registers are blocked by the calling convention.
52  // Input operands must use a fixed register, a use-at-start policy, or
53  // a non-register policy.
54  ASSERT(Output() == NULL ||
55  LUnallocated::cast(Output())->HasFixedPolicy() ||
56  !LUnallocated::cast(Output())->HasRegisterPolicy());
57  for (UseIterator it(this); !it.Done(); it.Advance()) {
58  LUnallocated* operand = LUnallocated::cast(it.Current());
59  ASSERT(operand->HasFixedPolicy() ||
60  operand->IsUsedAtStart());
61  }
62  for (TempIterator it(this); !it.Done(); it.Advance()) {
63  LUnallocated* operand = LUnallocated::cast(it.Current());
64  ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
65  }
66 }
67 #endif
68 
69 
70 void LInstruction::PrintTo(StringStream* stream) {
71  stream->Add("%s ", this->Mnemonic());
72 
73  PrintOutputOperandTo(stream);
74 
75  PrintDataTo(stream);
76 
77  if (HasEnvironment()) {
78  stream->Add(" ");
79  environment()->PrintTo(stream);
80  }
81 
82  if (HasPointerMap()) {
83  stream->Add(" ");
84  pointer_map()->PrintTo(stream);
85  }
86 }
87 
88 
89 void LInstruction::PrintDataTo(StringStream* stream) {
90  stream->Add("= ");
91  for (int i = 0; i < InputCount(); i++) {
92  if (i > 0) stream->Add(" ");
93  if (InputAt(i) == NULL) {
94  stream->Add("NULL");
95  } else {
96  InputAt(i)->PrintTo(stream);
97  }
98  }
99 }
100 
101 
102 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
103  if (HasResult()) result()->PrintTo(stream);
104 }
105 
106 
107 void LLabel::PrintDataTo(StringStream* stream) {
108  LGap::PrintDataTo(stream);
109  LLabel* rep = replacement();
110  if (rep != NULL) {
111  stream->Add(" Dead block replaced with B%d", rep->block_id());
112  }
113 }
114 
115 
116 bool LGap::IsRedundant() const {
117  for (int i = 0; i < 4; i++) {
118  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
119  return false;
120  }
121  }
122 
123  return true;
124 }
125 
126 
127 void LGap::PrintDataTo(StringStream* stream) {
128  for (int i = 0; i < 4; i++) {
129  stream->Add("(");
130  if (parallel_moves_[i] != NULL) {
131  parallel_moves_[i]->PrintDataTo(stream);
132  }
133  stream->Add(") ");
134  }
135 }
136 
137 
138 const char* LArithmeticD::Mnemonic() const {
139  switch (op()) {
140  case Token::ADD: return "add-d";
141  case Token::SUB: return "sub-d";
142  case Token::MUL: return "mul-d";
143  case Token::DIV: return "div-d";
144  case Token::MOD: return "mod-d";
145  default:
146  UNREACHABLE();
147  return NULL;
148  }
149 }
150 
151 
152 const char* LArithmeticT::Mnemonic() const {
153  switch (op()) {
154  case Token::ADD: return "add-t";
155  case Token::SUB: return "sub-t";
156  case Token::MUL: return "mul-t";
157  case Token::MOD: return "mod-t";
158  case Token::DIV: return "div-t";
159  case Token::BIT_AND: return "bit-and-t";
160  case Token::BIT_OR: return "bit-or-t";
161  case Token::BIT_XOR: return "bit-xor-t";
162  case Token::ROR: return "ror-t";
163  case Token::SHL: return "sal-t";
164  case Token::SAR: return "sar-t";
165  case Token::SHR: return "shr-t";
166  default:
167  UNREACHABLE();
168  return NULL;
169  }
170 }
171 
172 
173 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
174  return !gen->IsNextEmittedBlock(block_id());
175 }
176 
177 
178 template<int R>
179 bool LTemplateResultInstruction<R>::MustSignalDehoistedKeyedLoad(
180  LPlatformChunk* chunk) const {
181  HValue* hvalue = this->hydrogen_value();
182 
183  if (hvalue == NULL) return false;
184  if (!hvalue->representation().IsInteger32()) return false;
185  if (hvalue->HasRange() && !hvalue->range()->CanBeNegative()) return false;
186 
187  return chunk->GetDehoistedKeyIds()->Contains(hvalue->id());
188 }
189 
190 
191 void LGoto::PrintDataTo(StringStream* stream) {
192  stream->Add("B%d", block_id());
193 }
194 
195 
196 void LBranch::PrintDataTo(StringStream* stream) {
197  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
198  value()->PrintTo(stream);
199 }
200 
201 
202 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
203  stream->Add("if ");
204  left()->PrintTo(stream);
205  stream->Add(" %s ", Token::String(op()));
206  right()->PrintTo(stream);
207  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
208 }
209 
210 
211 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
212  stream->Add("if is_object(");
213  value()->PrintTo(stream);
214  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
215 }
216 
217 
218 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
219  stream->Add("if is_string(");
220  value()->PrintTo(stream);
221  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
222 }
223 
224 
225 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
226  stream->Add("if is_smi(");
227  value()->PrintTo(stream);
228  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
229 }
230 
231 
232 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
233  stream->Add("if is_undetectable(");
234  value()->PrintTo(stream);
235  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
236 }
237 
238 
239 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
240  stream->Add("if string_compare(");
241  left()->PrintTo(stream);
242  right()->PrintTo(stream);
243  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
244 }
245 
246 
247 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
248  stream->Add("if has_instance_type(");
249  value()->PrintTo(stream);
250  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
251 }
252 
253 
254 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
255  stream->Add("if has_cached_array_index(");
256  value()->PrintTo(stream);
257  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
258 }
259 
260 
261 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
262  stream->Add("if class_of_test(");
263  value()->PrintTo(stream);
264  stream->Add(", \"%o\") then B%d else B%d",
265  *hydrogen()->class_name(),
266  true_block_id(),
267  false_block_id());
268 }
269 
270 
271 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
272  stream->Add("if typeof ");
273  value()->PrintTo(stream);
274  stream->Add(" == \"%s\" then B%d else B%d",
275  hydrogen()->type_literal()->ToCString().get(),
276  true_block_id(), false_block_id());
277 }
278 
279 
280 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
281  stream->Add(" = ");
282  function()->PrintTo(stream);
283  stream->Add(".code_entry = ");
284  code_object()->PrintTo(stream);
285 }
286 
287 
288 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
289  stream->Add(" = ");
290  base_object()->PrintTo(stream);
291  stream->Add(" + ");
292  offset()->PrintTo(stream);
293 }
294 
295 
296 void LCallJSFunction::PrintDataTo(StringStream* stream) {
297  stream->Add("= ");
298  function()->PrintTo(stream);
299  stream->Add("#%d / ", arity());
300 }
301 
302 
303 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
304  for (int i = 0; i < InputCount(); i++) {
305  InputAt(i)->PrintTo(stream);
306  stream->Add(" ");
307  }
308  stream->Add("#%d / ", arity());
309 }
310 
311 
312 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
313  context()->PrintTo(stream);
314  stream->Add("[%d]", slot_index());
315 }
316 
317 
318 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
319  context()->PrintTo(stream);
320  stream->Add("[%d] <- ", slot_index());
321  value()->PrintTo(stream);
322 }
323 
324 
325 void LInvokeFunction::PrintDataTo(StringStream* stream) {
326  stream->Add("= ");
327  function()->PrintTo(stream);
328  stream->Add(" #%d / ", arity());
329 }
330 
331 
332 void LCallNew::PrintDataTo(StringStream* stream) {
333  stream->Add("= ");
334  constructor()->PrintTo(stream);
335  stream->Add(" #%d / ", arity());
336 }
337 
338 
339 void LCallNewArray::PrintDataTo(StringStream* stream) {
340  stream->Add("= ");
341  constructor()->PrintTo(stream);
342  stream->Add(" #%d / ", arity());
343  ElementsKind kind = hydrogen()->elements_kind();
344  stream->Add(" (%s) ", ElementsKindToString(kind));
345 }
346 
347 
348 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
349  arguments()->PrintTo(stream);
350 
351  stream->Add(" length ");
352  length()->PrintTo(stream);
353 
354  stream->Add(" index ");
355  index()->PrintTo(stream);
356 }
357 
358 
359 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
360  return spill_slot_count_++;
361 }
362 
363 
364 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
365  // All stack slots are Double stack slots on x64.
366  // Alternatively, at some point, start using half-size
367  // stack slots for int32 values.
368  int index = GetNextSpillIndex(kind);
369  if (kind == DOUBLE_REGISTERS) {
370  return LDoubleStackSlot::Create(index, zone());
371  } else {
372  ASSERT(kind == GENERAL_REGISTERS);
373  return LStackSlot::Create(index, zone());
374  }
375 }
376 
377 
378 void LStoreNamedField::PrintDataTo(StringStream* stream) {
379  object()->PrintTo(stream);
380  hydrogen()->access().PrintTo(stream);
381  stream->Add(" <- ");
382  value()->PrintTo(stream);
383 }
384 
385 
386 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
387  object()->PrintTo(stream);
388  stream->Add(".");
389  stream->Add(String::cast(*name())->ToCString().get());
390  stream->Add(" <- ");
391  value()->PrintTo(stream);
392 }
393 
394 
395 void LLoadKeyed::PrintDataTo(StringStream* stream) {
396  elements()->PrintTo(stream);
397  stream->Add("[");
398  key()->PrintTo(stream);
399  if (hydrogen()->IsDehoisted()) {
400  stream->Add(" + %d]", additional_index());
401  } else {
402  stream->Add("]");
403  }
404 }
405 
406 
407 void LStoreKeyed::PrintDataTo(StringStream* stream) {
408  elements()->PrintTo(stream);
409  stream->Add("[");
410  key()->PrintTo(stream);
411  if (hydrogen()->IsDehoisted()) {
412  stream->Add(" + %d] <-", additional_index());
413  } else {
414  stream->Add("] <- ");
415  }
416 
417  if (value() == NULL) {
418  ASSERT(hydrogen()->IsConstantHoleStore() &&
419  hydrogen()->value()->representation().IsDouble());
420  stream->Add("<the hole(nan)>");
421  } else {
422  value()->PrintTo(stream);
423  }
424 }
425 
426 
427 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
428  object()->PrintTo(stream);
429  stream->Add("[");
430  key()->PrintTo(stream);
431  stream->Add("] <- ");
432  value()->PrintTo(stream);
433 }
434 
435 
436 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
437  object()->PrintTo(stream);
438  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
439 }
440 
441 
442 LPlatformChunk* LChunkBuilder::Build() {
443  ASSERT(is_unused());
444  chunk_ = new(zone()) LPlatformChunk(info(), graph());
445  LPhase phase("L_Building chunk", chunk_);
446  status_ = BUILDING;
447 
448  // If compiling for OSR, reserve space for the unoptimized frame,
449  // which will be subsumed into this frame.
450  if (graph()->has_osr()) {
451  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
452  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
453  }
454  }
455 
456  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
457  for (int i = 0; i < blocks->length(); i++) {
458  HBasicBlock* next = NULL;
459  if (i < blocks->length() - 1) next = blocks->at(i + 1);
460  DoBasicBlock(blocks->at(i), next);
461  if (is_aborted()) return NULL;
462  }
463  status_ = DONE;
464  return chunk_;
465 }
466 
467 
468 void LChunkBuilder::Abort(BailoutReason reason) {
469  info()->set_bailout_reason(reason);
470  status_ = ABORTED;
471 }
472 
473 
474 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
475  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
476  Register::ToAllocationIndex(reg));
477 }
478 
479 
480 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
481  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
482  XMMRegister::ToAllocationIndex(reg));
483 }
484 
485 
486 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
487  return Use(value, ToUnallocated(fixed_register));
488 }
489 
490 
491 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
492  return Use(value, ToUnallocated(reg));
493 }
494 
495 
496 LOperand* LChunkBuilder::UseRegister(HValue* value) {
497  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
498 }
499 
500 
501 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
502  return Use(value,
503  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
504  LUnallocated::USED_AT_START));
505 }
506 
507 
508 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
509  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
510 }
511 
512 
513 LOperand* LChunkBuilder::UseTempRegisterOrConstant(HValue* value) {
514  return value->IsConstant()
515  ? chunk_->DefineConstantOperand(HConstant::cast(value))
516  : UseTempRegister(value);
517 }
518 
519 
520 LOperand* LChunkBuilder::Use(HValue* value) {
521  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
522 }
523 
524 
525 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
526  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
527  LUnallocated::USED_AT_START));
528 }
529 
530 
531 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
532  return value->IsConstant()
533  ? chunk_->DefineConstantOperand(HConstant::cast(value))
534  : Use(value);
535 }
536 
537 
538 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
539  return value->IsConstant()
540  ? chunk_->DefineConstantOperand(HConstant::cast(value))
541  : UseAtStart(value);
542 }
543 
544 
545 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
546  return value->IsConstant()
547  ? chunk_->DefineConstantOperand(HConstant::cast(value))
548  : UseRegister(value);
549 }
550 
551 
552 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
553  return value->IsConstant()
554  ? chunk_->DefineConstantOperand(HConstant::cast(value))
555  : UseRegisterAtStart(value);
556 }
557 
558 
559 LOperand* LChunkBuilder::UseConstant(HValue* value) {
560  return chunk_->DefineConstantOperand(HConstant::cast(value));
561 }
562 
563 
564 LOperand* LChunkBuilder::UseAny(HValue* value) {
565  return value->IsConstant()
566  ? chunk_->DefineConstantOperand(HConstant::cast(value))
567  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
568 }
569 
570 
571 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
572  if (value->EmitAtUses()) {
573  HInstruction* instr = HInstruction::cast(value);
574  VisitInstruction(instr);
575  }
576  operand->set_virtual_register(value->id());
577  return operand;
578 }
579 
580 
581 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
582  LUnallocated* result) {
583  result->set_virtual_register(current_instruction_->id());
584  instr->set_result(result);
585  return instr;
586 }
587 
588 
589 LInstruction* LChunkBuilder::DefineAsRegister(
590  LTemplateResultInstruction<1>* instr) {
591  return Define(instr,
592  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
593 }
594 
595 
596 LInstruction* LChunkBuilder::DefineAsSpilled(
597  LTemplateResultInstruction<1>* instr,
598  int index) {
599  return Define(instr,
600  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
601 }
602 
603 
604 LInstruction* LChunkBuilder::DefineSameAsFirst(
605  LTemplateResultInstruction<1>* instr) {
606  return Define(instr,
607  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
608 }
609 
610 
611 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
612  Register reg) {
613  return Define(instr, ToUnallocated(reg));
614 }
615 
616 
617 LInstruction* LChunkBuilder::DefineFixedDouble(
618  LTemplateResultInstruction<1>* instr,
619  XMMRegister reg) {
620  return Define(instr, ToUnallocated(reg));
621 }
622 
623 
624 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
625  HEnvironment* hydrogen_env = current_block_->last_environment();
626  int argument_index_accumulator = 0;
627  ZoneList<HValue*> objects_to_materialize(0, zone());
628  instr->set_environment(CreateEnvironment(hydrogen_env,
629  &argument_index_accumulator,
630  &objects_to_materialize));
631  return instr;
632 }
633 
634 
635 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
636  HInstruction* hinstr,
637  CanDeoptimize can_deoptimize) {
638  info()->MarkAsNonDeferredCalling();
639 
640 #ifdef DEBUG
641  instr->VerifyCall();
642 #endif
643  instr->MarkAsCall();
644  instr = AssignPointerMap(instr);
645 
646  // If the instruction does not have side effects, lazy deoptimization
647  // after the call will try to deoptimize to the point before the call.
648  // Thus we still need to attach an environment to this call even if
649  // the call sequence cannot deoptimize eagerly.
650  bool needs_environment =
651  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
652  !hinstr->HasObservableSideEffects();
653  if (needs_environment && !instr->HasEnvironment()) {
654  instr = AssignEnvironment(instr);
655  }
656 
657  return instr;
658 }
659 
660 
661 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
662  ASSERT(!instr->HasPointerMap());
663  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
664  return instr;
665 }
666 
667 
668 LUnallocated* LChunkBuilder::TempRegister() {
669  LUnallocated* operand =
670  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
671  int vreg = allocator_->GetVirtualRegister();
672  if (!allocator_->AllocationOk()) {
673  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
674  vreg = 0;
675  }
676  operand->set_virtual_register(vreg);
677  return operand;
678 }
679 
680 
681 LOperand* LChunkBuilder::FixedTemp(Register reg) {
682  LUnallocated* operand = ToUnallocated(reg);
683  ASSERT(operand->HasFixedPolicy());
684  return operand;
685 }
686 
687 
688 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
689  LUnallocated* operand = ToUnallocated(reg);
690  ASSERT(operand->HasFixedPolicy());
691  return operand;
692 }
693 
694 
695 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
696  return new(zone()) LLabel(instr->block());
697 }
698 
699 
700 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
701  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
702 }
703 
704 
705 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
706  UNREACHABLE();
707  return NULL;
708 }
709 
710 
711 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
712  return AssignEnvironment(new(zone()) LDeoptimize);
713 }
714 
715 
716 LInstruction* LChunkBuilder::DoShift(Token::Value op,
717  HBitwiseBinaryOperation* instr) {
718  if (instr->representation().IsSmiOrInteger32()) {
719  ASSERT(instr->left()->representation().Equals(instr->representation()));
720  ASSERT(instr->right()->representation().Equals(instr->representation()));
721  LOperand* left = UseRegisterAtStart(instr->left());
722 
723  HValue* right_value = instr->right();
724  LOperand* right = NULL;
725  int constant_value = 0;
726  if (right_value->IsConstant()) {
727  HConstant* constant = HConstant::cast(right_value);
728  right = chunk_->DefineConstantOperand(constant);
729  constant_value = constant->Integer32Value() & 0x1f;
730  } else {
731  right = UseFixed(right_value, rcx);
732  }
733 
734  // Shift operations can only deoptimize if we do a logical shift by 0 and
735  // the result cannot be truncated to int32.
736  bool does_deopt = false;
737  if (op == Token::SHR && constant_value == 0) {
738  if (FLAG_opt_safe_uint32_operations) {
739  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
740  } else {
741  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
742  }
743  }
744 
745  LInstruction* result =
746  DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
747  return does_deopt ? AssignEnvironment(result) : result;
748  } else {
749  return DoArithmeticT(op, instr);
750  }
751 }
752 
753 
754 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
755  HArithmeticBinaryOperation* instr) {
756  ASSERT(instr->representation().IsDouble());
757  ASSERT(instr->left()->representation().IsDouble());
758  ASSERT(instr->right()->representation().IsDouble());
759  if (op == Token::MOD) {
760  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
761  LOperand* right = UseFixedDouble(instr->BetterRightOperand(), xmm1);
762  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
763  return MarkAsCall(DefineSameAsFirst(result), instr);
764  } else {
765  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
766  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
767  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
768  return DefineSameAsFirst(result);
769  }
770 }
771 
772 
773 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
774  HBinaryOperation* instr) {
775  HValue* left = instr->left();
776  HValue* right = instr->right();
777  ASSERT(left->representation().IsTagged());
778  ASSERT(right->representation().IsTagged());
779  LOperand* context = UseFixed(instr->context(), rsi);
780  LOperand* left_operand = UseFixed(left, rdx);
781  LOperand* right_operand = UseFixed(right, rax);
782  LArithmeticT* result =
783  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
784  return MarkAsCall(DefineFixed(result, rax), instr);
785 }
786 
787 
788 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
789  ASSERT(is_building());
790  current_block_ = block;
791  next_block_ = next_block;
792  if (block->IsStartBlock()) {
793  block->UpdateEnvironment(graph_->start_environment());
794  argument_count_ = 0;
795  } else if (block->predecessors()->length() == 1) {
796  // We have a single predecessor => copy environment and outgoing
797  // argument count from the predecessor.
798  ASSERT(block->phis()->length() == 0);
799  HBasicBlock* pred = block->predecessors()->at(0);
800  HEnvironment* last_environment = pred->last_environment();
801  ASSERT(last_environment != NULL);
802  // Only copy the environment if it is later used again.
803  if (pred->end()->SecondSuccessor() == NULL) {
804  ASSERT(pred->end()->FirstSuccessor() == block);
805  } else {
806  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
807  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
808  last_environment = last_environment->Copy();
809  }
810  }
811  block->UpdateEnvironment(last_environment);
812  ASSERT(pred->argument_count() >= 0);
813  argument_count_ = pred->argument_count();
814  } else {
815  // We are at a state join => process phis.
816  HBasicBlock* pred = block->predecessors()->at(0);
817  // No need to copy the environment; it cannot be used later.
818  HEnvironment* last_environment = pred->last_environment();
819  for (int i = 0; i < block->phis()->length(); ++i) {
820  HPhi* phi = block->phis()->at(i);
821  if (phi->HasMergedIndex()) {
822  last_environment->SetValueAt(phi->merged_index(), phi);
823  }
824  }
825  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
826  if (block->deleted_phis()->at(i) < last_environment->length()) {
827  last_environment->SetValueAt(block->deleted_phis()->at(i),
828  graph_->GetConstantUndefined());
829  }
830  }
831  block->UpdateEnvironment(last_environment);
832  // Pick up the outgoing argument count of one of the predecessors.
833  argument_count_ = pred->argument_count();
834  }
835  HInstruction* current = block->first();
836  int start = chunk_->instructions()->length();
837  while (current != NULL && !is_aborted()) {
838  // Code for constants in registers is generated lazily.
839  if (!current->EmitAtUses()) {
840  VisitInstruction(current);
841  }
842  current = current->next();
843  }
844  int end = chunk_->instructions()->length() - 1;
845  if (end >= start) {
846  block->set_first_instruction_index(start);
847  block->set_last_instruction_index(end);
848  }
849  block->set_argument_count(argument_count_);
850  next_block_ = NULL;
851  current_block_ = NULL;
852 }
853 
854 
855 void LChunkBuilder::VisitInstruction(HInstruction* current) {
856  HInstruction* old_current = current_instruction_;
857  current_instruction_ = current;
858 
859  LInstruction* instr = NULL;
860  if (current->CanReplaceWithDummyUses()) {
861  if (current->OperandCount() == 0) {
862  instr = DefineAsRegister(new(zone()) LDummy());
863  } else {
864  ASSERT(!current->OperandAt(0)->IsControlInstruction());
865  instr = DefineAsRegister(new(zone())
866  LDummyUse(UseAny(current->OperandAt(0))));
867  }
868  for (int i = 1; i < current->OperandCount(); ++i) {
869  if (current->OperandAt(i)->IsControlInstruction()) continue;
870  LInstruction* dummy =
871  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
872  dummy->set_hydrogen_value(current);
873  chunk_->AddInstruction(dummy, current_block_);
874  }
875  } else {
876  instr = current->CompileToLithium(this);
877  }
878 
879  argument_count_ += current->argument_delta();
880  ASSERT(argument_count_ >= 0);
881 
882  if (instr != NULL) {
883  // Associate the hydrogen instruction first, since we may need it for
884  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
885  instr->set_hydrogen_value(current);
886 
887 #if DEBUG
888  // Make sure that the lithium instruction has either no fixed register
889  // constraints in temps or the result OR no uses that are only used at
890  // start. If this invariant doesn't hold, the register allocator can decide
891  // to insert a split of a range immediately before the instruction due to an
892  // already allocated register needing to be used for the instruction's fixed
893  // register constraint. In this case, the register allocator won't see an
894  // interference between the split child and the use-at-start (it would if
895  // it was just a plain use), so it is free to move the split child into
896  // the same register that is used for the use-at-start.
897  // See https://code.google.com/p/chromium/issues/detail?id=201590
898  if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
899  int fixed = 0;
900  int used_at_start = 0;
901  for (UseIterator it(instr); !it.Done(); it.Advance()) {
902  LUnallocated* operand = LUnallocated::cast(it.Current());
903  if (operand->IsUsedAtStart()) ++used_at_start;
904  }
905  if (instr->Output() != NULL) {
906  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
907  }
908  for (TempIterator it(instr); !it.Done(); it.Advance()) {
909  LUnallocated* operand = LUnallocated::cast(it.Current());
910  if (operand->HasFixedPolicy()) ++fixed;
911  }
912  ASSERT(fixed == 0 || used_at_start == 0);
913  }
914 #endif
915 
916  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
917  instr = AssignPointerMap(instr);
918  }
919  if (FLAG_stress_environments && !instr->HasEnvironment()) {
920  instr = AssignEnvironment(instr);
921  }
922  chunk_->AddInstruction(instr, current_block_);
923 
924  if (instr->IsCall()) {
925  HValue* hydrogen_value_for_lazy_bailout = current;
926  LInstruction* instruction_needing_environment = NULL;
927  if (current->HasObservableSideEffects()) {
928  HSimulate* sim = HSimulate::cast(current->next());
929  instruction_needing_environment = instr;
930  sim->ReplayEnvironment(current_block_->last_environment());
931  hydrogen_value_for_lazy_bailout = sim;
932  }
933  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
934  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
935  chunk_->AddInstruction(bailout, current_block_);
936  if (instruction_needing_environment != NULL) {
937  // Store the lazy deopt environment with the instruction if needed.
938  // Right now it is only used for LInstanceOfKnownGlobal.
939  instruction_needing_environment->
940  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
941  }
942  }
943  }
944  current_instruction_ = old_current;
945 }
946 
947 
948 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
949  return new(zone()) LGoto(instr->FirstSuccessor());
950 }
951 
952 
953 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
954  return new(zone()) LDebugBreak();
955 }
956 
957 
958 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
959  LInstruction* goto_instr = CheckElideControlInstruction(instr);
960  if (goto_instr != NULL) return goto_instr;
961 
962  HValue* value = instr->value();
963  Representation r = value->representation();
964  HType type = value->type();
965  ToBooleanStub::Types expected = instr->expected_input_types();
966  if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
967 
968  bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
969  type.IsJSArray() || type.IsHeapNumber() || type.IsString();
970  LInstruction* branch = new(zone()) LBranch(UseRegister(value));
971  if (!easy_case &&
972  ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
973  !expected.IsGeneric())) {
974  branch = AssignEnvironment(branch);
975  }
976  return branch;
977 }
978 
979 
980 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
981  LInstruction* goto_instr = CheckElideControlInstruction(instr);
982  if (goto_instr != NULL) return goto_instr;
983 
984  ASSERT(instr->value()->representation().IsTagged());
985  LOperand* value = UseRegisterAtStart(instr->value());
986  return new(zone()) LCmpMapAndBranch(value);
987 }
988 
989 
990 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
991  info()->MarkAsRequiresFrame();
992  return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
993 }
994 
995 
996 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
997  info()->MarkAsRequiresFrame();
998  return DefineAsRegister(new(zone()) LArgumentsElements);
999 }
1000 
1001 
1002 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1003  LOperand* left = UseFixed(instr->left(), rax);
1004  LOperand* right = UseFixed(instr->right(), rdx);
1005  LOperand* context = UseFixed(instr->context(), rsi);
1006  LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
1007  return MarkAsCall(DefineFixed(result, rax), instr);
1008 }
1009 
1010 
1011 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1012  HInstanceOfKnownGlobal* instr) {
1013  LInstanceOfKnownGlobal* result =
1014  new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->context(), rsi),
1015  UseFixed(instr->left(), rax),
1016  FixedTemp(rdi));
1017  return MarkAsCall(DefineFixed(result, rax), instr);
1018 }
1019 
1020 
1021 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1022  LOperand* receiver = UseRegister(instr->receiver());
1023  LOperand* function = UseRegisterAtStart(instr->function());
1024  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1025  return AssignEnvironment(DefineSameAsFirst(result));
1026 }
1027 
1028 
1029 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1030  LOperand* function = UseFixed(instr->function(), rdi);
1031  LOperand* receiver = UseFixed(instr->receiver(), rax);
1032  LOperand* length = UseFixed(instr->length(), rbx);
1033  LOperand* elements = UseFixed(instr->elements(), rcx);
1034  LApplyArguments* result = new(zone()) LApplyArguments(function,
1035  receiver,
1036  length,
1037  elements);
1038  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1039 }
1040 
1041 
1042 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1043  LOperand* argument = UseOrConstant(instr->argument());
1044  return new(zone()) LPushArgument(argument);
1045 }
1046 
1047 
1048 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1049  HStoreCodeEntry* store_code_entry) {
1050  LOperand* function = UseRegister(store_code_entry->function());
1051  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1052  return new(zone()) LStoreCodeEntry(function, code_object);
1053 }
1054 
1055 
1056 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1057  HInnerAllocatedObject* instr) {
1058  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1059  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1060  return DefineAsRegister(
1061  new(zone()) LInnerAllocatedObject(base_object, offset));
1062 }
1063 
1064 
1065 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1066  return instr->HasNoUses()
1067  ? NULL
1068  : DefineAsRegister(new(zone()) LThisFunction);
1069 }
1070 
1071 
1072 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1073  if (instr->HasNoUses()) return NULL;
1074 
1075  if (info()->IsStub()) {
1076  return DefineFixed(new(zone()) LContext, rsi);
1077  }
1078 
1079  return DefineAsRegister(new(zone()) LContext);
1080 }
1081 
1082 
1083 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1084  LOperand* context = UseFixed(instr->context(), rsi);
1085  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1086 }
1087 
1088 
1089 LInstruction* LChunkBuilder::DoCallJSFunction(
1090  HCallJSFunction* instr) {
1091  LOperand* function = UseFixed(instr->function(), rdi);
1092 
1093  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1094 
1095  return MarkAsCall(DefineFixed(result, rax), instr);
1096 }
1097 
1098 
1099 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1100  HCallWithDescriptor* instr) {
1101  const CallInterfaceDescriptor* descriptor = instr->descriptor();
1102 
1103  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1104  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1105  ops.Add(target, zone());
1106  for (int i = 1; i < instr->OperandCount(); i++) {
1107  LOperand* op = UseFixed(instr->OperandAt(i),
1108  descriptor->GetParameterRegister(i - 1));
1109  ops.Add(op, zone());
1110  }
1111 
1112  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1113  descriptor, ops, zone());
1114  return MarkAsCall(DefineFixed(result, rax), instr);
1115 }
1116 
1117 
1118 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1119  LOperand* context = UseFixed(instr->context(), rsi);
1120  LOperand* function = UseFixed(instr->function(), rdi);
1121  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1122  return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1123 }
1124 
1125 
1126 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1127  switch (instr->op()) {
1128  case kMathFloor: return DoMathFloor(instr);
1129  case kMathRound: return DoMathRound(instr);
1130  case kMathAbs: return DoMathAbs(instr);
1131  case kMathLog: return DoMathLog(instr);
1132  case kMathExp: return DoMathExp(instr);
1133  case kMathSqrt: return DoMathSqrt(instr);
1134  case kMathPowHalf: return DoMathPowHalf(instr);
1135  case kMathClz32: return DoMathClz32(instr);
1136  default:
1137  UNREACHABLE();
1138  return NULL;
1139  }
1140 }
1141 
1142 
1143 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1144  LOperand* input = UseRegisterAtStart(instr->value());
1145  LMathFloor* result = new(zone()) LMathFloor(input);
1146  return AssignEnvironment(DefineAsRegister(result));
1147 }
1148 
1149 
1150 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1151  LOperand* input = UseRegister(instr->value());
1152  LOperand* temp = FixedTemp(xmm4);
1153  LMathRound* result = new(zone()) LMathRound(input, temp);
1154  return AssignEnvironment(DefineAsRegister(result));
1155 }
1156 
1157 
1158 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1159  LOperand* context = UseAny(instr->context());
1160  LOperand* input = UseRegisterAtStart(instr->value());
1161  LInstruction* result =
1162  DefineSameAsFirst(new(zone()) LMathAbs(context, input));
1163  Representation r = instr->value()->representation();
1164  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1165  if (!r.IsDouble()) result = AssignEnvironment(result);
1166  return result;
1167 }
1168 
1169 
1170 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1171  ASSERT(instr->representation().IsDouble());
1172  ASSERT(instr->value()->representation().IsDouble());
1173  LOperand* input = UseRegisterAtStart(instr->value());
1174  return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
1175 }
1176 
1177 
1178 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1179  LOperand* input = UseRegisterAtStart(instr->value());
1180  LMathClz32* result = new(zone()) LMathClz32(input);
1181  return DefineAsRegister(result);
1182 }
1183 
1184 
1185 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1186  ASSERT(instr->representation().IsDouble());
1187  ASSERT(instr->value()->representation().IsDouble());
1188  LOperand* value = UseTempRegister(instr->value());
1189  LOperand* temp1 = TempRegister();
1190  LOperand* temp2 = TempRegister();
1191  LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1192  return DefineAsRegister(result);
1193 }
1194 
1195 
1196 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1197  LOperand* input = UseRegisterAtStart(instr->value());
1198  LMathSqrt* result = new(zone()) LMathSqrt(input);
1199  return DefineSameAsFirst(result);
1200 }
1201 
1202 
1203 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1204  LOperand* input = UseRegisterAtStart(instr->value());
1205  LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1206  return DefineSameAsFirst(result);
1207 }
1208 
1209 
1210 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1211  LOperand* context = UseFixed(instr->context(), rsi);
1212  LOperand* constructor = UseFixed(instr->constructor(), rdi);
1213  LCallNew* result = new(zone()) LCallNew(context, constructor);
1214  return MarkAsCall(DefineFixed(result, rax), instr);
1215 }
1216 
1217 
1218 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1219  LOperand* context = UseFixed(instr->context(), rsi);
1220  LOperand* constructor = UseFixed(instr->constructor(), rdi);
1221  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1222  return MarkAsCall(DefineFixed(result, rax), instr);
1223 }
1224 
1225 
1226 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1227  LOperand* context = UseFixed(instr->context(), rsi);
1228  LOperand* function = UseFixed(instr->function(), rdi);
1229  LCallFunction* call = new(zone()) LCallFunction(context, function);
1230  return MarkAsCall(DefineFixed(call, rax), instr);
1231 }
1232 
1233 
1234 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1235  LOperand* context = UseFixed(instr->context(), rsi);
1236  LCallRuntime* result = new(zone()) LCallRuntime(context);
1237  return MarkAsCall(DefineFixed(result, rax), instr);
1238 }
1239 
1240 
1241 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1242  return DoShift(Token::ROR, instr);
1243 }
1244 
1245 
1246 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1247  return DoShift(Token::SHR, instr);
1248 }
1249 
1250 
1251 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1252  return DoShift(Token::SAR, instr);
1253 }
1254 
1255 
1256 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1257  return DoShift(Token::SHL, instr);
1258 }
1259 
1260 
1261 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1262  if (instr->representation().IsSmiOrInteger32()) {
1263  ASSERT(instr->left()->representation().Equals(instr->representation()));
1264  ASSERT(instr->right()->representation().Equals(instr->representation()));
1265  ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1266 
1267  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1268  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1269  return DefineSameAsFirst(new(zone()) LBitI(left, right));
1270  } else {
1271  return DoArithmeticT(instr->op(), instr);
1272  }
1273 }
1274 
1275 
1276 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1277  ASSERT(instr->representation().IsSmiOrInteger32());
1278  ASSERT(instr->left()->representation().Equals(instr->representation()));
1279  ASSERT(instr->right()->representation().Equals(instr->representation()));
1280  LOperand* dividend = UseRegister(instr->left());
1281  int32_t divisor = instr->right()->GetInteger32Constant();
1282  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1283  dividend, divisor));
1284  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1285  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1286  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1287  divisor != 1 && divisor != -1)) {
1288  result = AssignEnvironment(result);
1289  }
1290  return result;
1291 }
1292 
1293 
1294 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1295  ASSERT(instr->representation().IsInteger32());
1296  ASSERT(instr->left()->representation().Equals(instr->representation()));
1297  ASSERT(instr->right()->representation().Equals(instr->representation()));
1298  LOperand* dividend = UseRegister(instr->left());
1299  int32_t divisor = instr->right()->GetInteger32Constant();
1300  LOperand* temp1 = FixedTemp(rax);
1301  LOperand* temp2 = FixedTemp(rdx);
1302  LInstruction* result = DefineFixed(new(zone()) LDivByConstI(
1303  dividend, divisor, temp1, temp2), rdx);
1304  if (divisor == 0 ||
1305  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1306  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1307  result = AssignEnvironment(result);
1308  }
1309  return result;
1310 }
1311 
1312 
1313 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1314  ASSERT(instr->representation().IsSmiOrInteger32());
1315  ASSERT(instr->left()->representation().Equals(instr->representation()));
1316  ASSERT(instr->right()->representation().Equals(instr->representation()));
1317  LOperand* dividend = UseFixed(instr->left(), rax);
1318  LOperand* divisor = UseRegister(instr->right());
1319  LOperand* temp = FixedTemp(rdx);
1320  LInstruction* result = DefineFixed(new(zone()) LDivI(
1321  dividend, divisor, temp), rax);
1322  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1323  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1324  instr->CheckFlag(HValue::kCanOverflow) ||
1325  (!instr->IsMathFloorOfDiv() &&
1326  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
1327  result = AssignEnvironment(result);
1328  }
1329  return result;
1330 }
1331 
1332 
1333 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1334  if (instr->representation().IsSmiOrInteger32()) {
1335  if (instr->RightIsPowerOf2()) {
1336  return DoDivByPowerOf2I(instr);
1337  } else if (instr->right()->IsConstant()) {
1338  return DoDivByConstI(instr);
1339  } else {
1340  return DoDivI(instr);
1341  }
1342  } else if (instr->representation().IsDouble()) {
1343  return DoArithmeticD(Token::DIV, instr);
1344  } else {
1345  return DoArithmeticT(Token::DIV, instr);
1346  }
1347 }
1348 
1349 
1350 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1351  LOperand* dividend = UseRegisterAtStart(instr->left());
1352  int32_t divisor = instr->right()->GetInteger32Constant();
1353  LInstruction* result = DefineSameAsFirst(new(zone()) LFlooringDivByPowerOf2I(
1354  dividend, divisor));
1355  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1356  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1357  result = AssignEnvironment(result);
1358  }
1359  return result;
1360 }
1361 
1362 
1363 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1364  ASSERT(instr->representation().IsInteger32());
1365  ASSERT(instr->left()->representation().Equals(instr->representation()));
1366  ASSERT(instr->right()->representation().Equals(instr->representation()));
1367  LOperand* dividend = UseRegister(instr->left());
1368  int32_t divisor = instr->right()->GetInteger32Constant();
1369  LOperand* temp1 = FixedTemp(rax);
1370  LOperand* temp2 = FixedTemp(rdx);
1371  LOperand* temp3 =
1372  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1373  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1374  NULL : TempRegister();
1375  LInstruction* result =
1376  DefineFixed(new(zone()) LFlooringDivByConstI(dividend,
1377  divisor,
1378  temp1,
1379  temp2,
1380  temp3),
1381  rdx);
1382  if (divisor == 0 ||
1383  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1384  result = AssignEnvironment(result);
1385  }
1386  return result;
1387 }
1388 
1389 
1390 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1391  if (instr->RightIsPowerOf2()) {
1392  return DoFlooringDivByPowerOf2I(instr);
1393  } else if (instr->right()->IsConstant()) {
1394  return DoFlooringDivByConstI(instr);
1395  } else {
1396  return DoDivI(instr);
1397  }
1398 }
1399 
1400 
1401 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1402  ASSERT(instr->representation().IsSmiOrInteger32());
1403  ASSERT(instr->left()->representation().Equals(instr->representation()));
1404  ASSERT(instr->right()->representation().Equals(instr->representation()));
1405  LOperand* dividend = UseRegisterAtStart(instr->left());
1406  int32_t divisor = instr->right()->GetInteger32Constant();
1407  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1408  dividend, divisor));
1409  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1410  result = AssignEnvironment(result);
1411  }
1412  return result;
1413 }
1414 
1415 
1416 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1417  ASSERT(instr->representation().IsSmiOrInteger32());
1418  ASSERT(instr->left()->representation().Equals(instr->representation()));
1419  ASSERT(instr->right()->representation().Equals(instr->representation()));
1420  LOperand* dividend = UseRegister(instr->left());
1421  int32_t divisor = instr->right()->GetInteger32Constant();
1422  LOperand* temp1 = FixedTemp(rax);
1423  LOperand* temp2 = FixedTemp(rdx);
1424  LInstruction* result = DefineFixed(new(zone()) LModByConstI(
1425  dividend, divisor, temp1, temp2), rax);
1426  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1427  result = AssignEnvironment(result);
1428  }
1429  return result;
1430 }
1431 
1432 
1433 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1434  ASSERT(instr->representation().IsSmiOrInteger32());
1435  ASSERT(instr->left()->representation().Equals(instr->representation()));
1436  ASSERT(instr->right()->representation().Equals(instr->representation()));
1437  LOperand* dividend = UseFixed(instr->left(), rax);
1438  LOperand* divisor = UseRegister(instr->right());
1439  LOperand* temp = FixedTemp(rdx);
1440  LInstruction* result = DefineFixed(new(zone()) LModI(
1441  dividend, divisor, temp), rdx);
1442  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1443  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1444  result = AssignEnvironment(result);
1445  }
1446  return result;
1447 }
1448 
1449 
1450 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1451  if (instr->representation().IsSmiOrInteger32()) {
1452  if (instr->RightIsPowerOf2()) {
1453  return DoModByPowerOf2I(instr);
1454  } else if (instr->right()->IsConstant()) {
1455  return DoModByConstI(instr);
1456  } else {
1457  return DoModI(instr);
1458  }
1459  } else if (instr->representation().IsDouble()) {
1460  return DoArithmeticD(Token::MOD, instr);
1461  } else {
1462  return DoArithmeticT(Token::MOD, instr);
1463  }
1464 }
1465 
1466 
1467 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1468  if (instr->representation().IsSmiOrInteger32()) {
1469  ASSERT(instr->left()->representation().Equals(instr->representation()));
1470  ASSERT(instr->right()->representation().Equals(instr->representation()));
1471  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1472  LOperand* right = UseOrConstant(instr->BetterRightOperand());
1473  LMulI* mul = new(zone()) LMulI(left, right);
1474  if (instr->CheckFlag(HValue::kCanOverflow) ||
1475  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1476  AssignEnvironment(mul);
1477  }
1478  return DefineSameAsFirst(mul);
1479  } else if (instr->representation().IsDouble()) {
1480  return DoArithmeticD(Token::MUL, instr);
1481  } else {
1482  return DoArithmeticT(Token::MUL, instr);
1483  }
1484 }
1485 
1486 
1487 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1488  if (instr->representation().IsSmiOrInteger32()) {
1489  ASSERT(instr->left()->representation().Equals(instr->representation()));
1490  ASSERT(instr->right()->representation().Equals(instr->representation()));
1491  LOperand* left = UseRegisterAtStart(instr->left());
1492  LOperand* right = UseOrConstantAtStart(instr->right());
1493  LSubI* sub = new(zone()) LSubI(left, right);
1494  LInstruction* result = DefineSameAsFirst(sub);
1495  if (instr->CheckFlag(HValue::kCanOverflow)) {
1496  result = AssignEnvironment(result);
1497  }
1498  return result;
1499  } else if (instr->representation().IsDouble()) {
1500  return DoArithmeticD(Token::SUB, instr);
1501  } else {
1502  return DoArithmeticT(Token::SUB, instr);
1503  }
1504 }
1505 
1506 
1507 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1508  if (instr->representation().IsSmiOrInteger32()) {
1509  // Check to see if it would be advantageous to use an lea instruction rather
1510  // than an add. This is the case when no overflow check is needed and there
1511  // are multiple uses of the add's inputs, so using a 3-register add will
1512  // preserve all input values for later uses.
1513  bool use_lea = LAddI::UseLea(instr);
1514  ASSERT(instr->left()->representation().Equals(instr->representation()));
1515  ASSERT(instr->right()->representation().Equals(instr->representation()));
1516  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1517  HValue* right_candidate = instr->BetterRightOperand();
1518  LOperand* right = use_lea
1519  ? UseRegisterOrConstantAtStart(right_candidate)
1520  : UseOrConstantAtStart(right_candidate);
1521  LAddI* add = new(zone()) LAddI(left, right);
1522  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1523  LInstruction* result = use_lea
1524  ? DefineAsRegister(add)
1525  : DefineSameAsFirst(add);
1526  if (can_overflow) {
1527  result = AssignEnvironment(result);
1528  }
1529  return result;
1530  } else if (instr->representation().IsExternal()) {
1531  ASSERT(instr->left()->representation().IsExternal());
1532  ASSERT(instr->right()->representation().IsInteger32());
1533  ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1534  bool use_lea = LAddI::UseLea(instr);
1535  LOperand* left = UseRegisterAtStart(instr->left());
1536  HValue* right_candidate = instr->right();
1537  LOperand* right = use_lea
1538  ? UseRegisterOrConstantAtStart(right_candidate)
1539  : UseOrConstantAtStart(right_candidate);
1540  LAddI* add = new(zone()) LAddI(left, right);
1541  LInstruction* result = use_lea
1542  ? DefineAsRegister(add)
1543  : DefineSameAsFirst(add);
1544  return result;
1545  } else if (instr->representation().IsDouble()) {
1546  return DoArithmeticD(Token::ADD, instr);
1547  } else {
1548  return DoArithmeticT(Token::ADD, instr);
1549  }
1550  return NULL;
1551 }
1552 
1553 
1554 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1555  LOperand* left = NULL;
1556  LOperand* right = NULL;
1557  ASSERT(instr->left()->representation().Equals(instr->representation()));
1558  ASSERT(instr->right()->representation().Equals(instr->representation()));
1559  if (instr->representation().IsSmi()) {
1560  left = UseRegisterAtStart(instr->BetterLeftOperand());
1561  right = UseAtStart(instr->BetterRightOperand());
1562  } else if (instr->representation().IsInteger32()) {
1563  left = UseRegisterAtStart(instr->BetterLeftOperand());
1564  right = UseOrConstantAtStart(instr->BetterRightOperand());
1565  } else {
1566  ASSERT(instr->representation().IsDouble());
1567  left = UseRegisterAtStart(instr->left());
1568  right = UseRegisterAtStart(instr->right());
1569  }
1570  LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1571  return DefineSameAsFirst(minmax);
1572 }
1573 
1574 
1575 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1576  ASSERT(instr->representation().IsDouble());
1577  // We call a C function for double power. It can't trigger a GC.
1578  // We need to use a fixed result register for the call.
1579  Representation exponent_type = instr->right()->representation();
1580  ASSERT(instr->left()->representation().IsDouble());
1581  LOperand* left = UseFixedDouble(instr->left(), xmm2);
1582  LOperand* right = exponent_type.IsDouble() ?
1583  UseFixedDouble(instr->right(), xmm1) : UseFixed(instr->right(), rdx);
1584  LPower* result = new(zone()) LPower(left, right);
1585  return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1586  CAN_DEOPTIMIZE_EAGERLY);
1587 }
1588 
1589 
1590 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1591  ASSERT(instr->left()->representation().IsTagged());
1592  ASSERT(instr->right()->representation().IsTagged());
1593  LOperand* context = UseFixed(instr->context(), rsi);
1594  LOperand* left = UseFixed(instr->left(), rdx);
1595  LOperand* right = UseFixed(instr->right(), rax);
1596  LCmpT* result = new(zone()) LCmpT(context, left, right);
1597  return MarkAsCall(DefineFixed(result, rax), instr);
1598 }
1599 
1600 
1601 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1602  HCompareNumericAndBranch* instr) {
1603  Representation r = instr->representation();
1604  if (r.IsSmiOrInteger32()) {
1605  ASSERT(instr->left()->representation().Equals(r));
1606  ASSERT(instr->right()->representation().Equals(r));
1607  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1608  LOperand* right = UseOrConstantAtStart(instr->right());
1609  return new(zone()) LCompareNumericAndBranch(left, right);
1610  } else {
1611  ASSERT(r.IsDouble());
1612  ASSERT(instr->left()->representation().IsDouble());
1613  ASSERT(instr->right()->representation().IsDouble());
1614  LOperand* left;
1615  LOperand* right;
1616  if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1617  left = UseRegisterOrConstantAtStart(instr->left());
1618  right = UseRegisterOrConstantAtStart(instr->right());
1619  } else {
1620  left = UseRegisterAtStart(instr->left());
1621  right = UseRegisterAtStart(instr->right());
1622  }
1623  return new(zone()) LCompareNumericAndBranch(left, right);
1624  }
1625 }
1626 
1627 
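// CheckElideControlInstruction returns an LGoto when the branch target is
// statically known, letting several of the *AndBranch builders below avoid
// emitting a compare at all.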
1628 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1629  HCompareObjectEqAndBranch* instr) {
1630  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1631  if (goto_instr != NULL) return goto_instr;
1632  LOperand* left = UseRegisterAtStart(instr->left());
1633  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1634  return new(zone()) LCmpObjectEqAndBranch(left, right);
1635 }
1636 
1637 
1638 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1639  HCompareHoleAndBranch* instr) {
1640  LOperand* value = UseRegisterAtStart(instr->value());
1641  return new(zone()) LCmpHoleAndBranch(value);
1642 }
1643 
1644 
1645 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1646  HCompareMinusZeroAndBranch* instr) {
1647  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1648  if (goto_instr != NULL) return goto_instr;
1649  LOperand* value = UseRegister(instr->value());
1650  return new(zone()) LCompareMinusZeroAndBranch(value);
1651 }
1652 
1653 
1654 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1655  ASSERT(instr->value()->representation().IsTagged());
1656  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
1657 }
1658 
1659 
1660 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1661  ASSERT(instr->value()->representation().IsTagged());
1662  LOperand* value = UseRegisterAtStart(instr->value());
1663  LOperand* temp = TempRegister();
1664  return new(zone()) LIsStringAndBranch(value, temp);
1665 }
1666 
1667 
1668 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1669  ASSERT(instr->value()->representation().IsTagged());
1670  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1671 }
1672 
1673 
1674 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1675  HIsUndetectableAndBranch* instr) {
1676  ASSERT(instr->value()->representation().IsTagged());
1677  LOperand* value = UseRegisterAtStart(instr->value());
1678  LOperand* temp = TempRegister();
1679  return new(zone()) LIsUndetectableAndBranch(value, temp);
1680 }
1681 
1682 
1683 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1684  HStringCompareAndBranch* instr) {
1685 
1686  ASSERT(instr->left()->representation().IsTagged());
1687  ASSERT(instr->right()->representation().IsTagged());
1688  LOperand* context = UseFixed(instr->context(), rsi);
1689  LOperand* left = UseFixed(instr->left(), rdx);
1690  LOperand* right = UseFixed(instr->right(), rax);
1691  LStringCompareAndBranch* result =
1692  new(zone()) LStringCompareAndBranch(context, left, right);
1693 
1694  return MarkAsCall(result, instr);
1695 }
1696 
1697 
1698 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1699  HHasInstanceTypeAndBranch* instr) {
1700  ASSERT(instr->value()->representation().IsTagged());
1701  LOperand* value = UseRegisterAtStart(instr->value());
1702  return new(zone()) LHasInstanceTypeAndBranch(value);
1703 }
1704 
1705 
1706 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1707  HGetCachedArrayIndex* instr) {
1708  ASSERT(instr->value()->representation().IsTagged());
1709  LOperand* value = UseRegisterAtStart(instr->value());
1710 
1711  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1712 }
1713 
1714 
1715 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1716  HHasCachedArrayIndexAndBranch* instr) {
1717  ASSERT(instr->value()->representation().IsTagged());
1718  LOperand* value = UseRegisterAtStart(instr->value());
1719  return new(zone()) LHasCachedArrayIndexAndBranch(value);
1720 }
1721 
1722 
1723 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1724  HClassOfTestAndBranch* instr) {
1725  LOperand* value = UseRegister(instr->value());
1726  return new(zone()) LClassOfTestAndBranch(value,
1727  TempRegister(),
1728  TempRegister());
1729 }
1730 
1731 
1732 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1733  LOperand* map = UseRegisterAtStart(instr->value());
1734  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1735 }
1736 
1737 
1738 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1739  LOperand* object = UseFixed(instr->value(), rax);
1740  LDateField* result = new(zone()) LDateField(object, instr->index());
1741  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1742 }
1743 
1744 
1745 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1746  LOperand* string = UseRegisterAtStart(instr->string());
1747  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1748  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1749 }
1750 
1751 
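// With --debug-code the generated store calls the runtime to verify the string
// and index, so the operands must all be registers, a context is required, and
// the instruction is marked as a call.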
1752 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1753  LOperand* string = UseRegisterAtStart(instr->string());
1754  LOperand* index = FLAG_debug_code
1755  ? UseRegisterAtStart(instr->index())
1756  : UseRegisterOrConstantAtStart(instr->index());
1757  LOperand* value = FLAG_debug_code
1758  ? UseRegisterAtStart(instr->value())
1759  : UseRegisterOrConstantAtStart(instr->value());
1760  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), rsi) : NULL;
1761  LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
1762  index, value);
1763  if (FLAG_debug_code) {
1764  result = MarkAsCall(result, instr);
1765  }
1766  return result;
1767 }
1768 
1769 
1770 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1771  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1772  LOperand* length = Use(instr->length());
1773  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1774 }
1775 
1776 
1777 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1778  HBoundsCheckBaseIndexInformation* instr) {
1779  UNREACHABLE();
1780  return NULL;
1781 }
1782 
1783 
1784 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1785  // The control instruction marking the end of a block that completed
1786  // abruptly (e.g., threw an exception). There is nothing specific to do.
1787  return NULL;
1788 }
1789 
1790 
1791 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1792  return NULL;
1793 }
1794 
1795 
1796 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1797  // All HForceRepresentation instructions should be eliminated in the
1798  // representation change phase of Hydrogen.
1799  UNREACHABLE();
1800  return NULL;
1801 }
1802 
1803 
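// DoChange lowers representation changes. Roughly: tagged->double becomes
// LNumberUntagD (which can deoptimize on non-numbers), tagged->int32 becomes
// LSmiUntag or LTaggedToI, double->tagged and overflowing int32->tagged may
// allocate a heap number and therefore carry a pointer map, and the remaining
// cases are plain register-to-register conversions.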
1804 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1805  Representation from = instr->from();
1806  Representation to = instr->to();
1807  if (from.IsSmi()) {
1808  if (to.IsTagged()) {
1809  LOperand* value = UseRegister(instr->value());
1810  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1811  }
1812  from = Representation::Tagged();
1813  }
1814  // Only mark conversions that might need to allocate as calling rather than
1815  // all changes. This way, simple non-allocating conversions do not have to
1816  // force building a stack frame.
1817  if (from.IsTagged()) {
1818  if (to.IsDouble()) {
1819  LOperand* value = UseRegister(instr->value());
1820  LInstruction* res = DefineAsRegister(new(zone()) LNumberUntagD(value));
1821  if (!instr->value()->representation().IsSmi()) {
1822  res = AssignEnvironment(res);
1823  }
1824  return res;
1825  } else if (to.IsSmi()) {
1826  HValue* val = instr->value();
1827  LOperand* value = UseRegister(val);
1828  if (val->type().IsSmi()) {
1829  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1830  }
1831  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1832  } else {
1833  ASSERT(to.IsInteger32());
1834  HValue* val = instr->value();
1835  LOperand* value = UseRegister(val);
1836  if (val->type().IsSmi() || val->representation().IsSmi()) {
1837  return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1838  } else {
1839  bool truncating = instr->CanTruncateToInt32();
1840  LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
1841  LInstruction* res =
1842  DefineSameAsFirst(new(zone()) LTaggedToI(value, xmm_temp));
1843  if (!instr->value()->representation().IsSmi()) {
1844  // Note: Only deopts in deferred code.
1845  res = AssignEnvironment(res);
1846  }
1847  return res;
1848  }
1849  }
1850  } else if (from.IsDouble()) {
1851  if (to.IsTagged()) {
1852  info()->MarkAsDeferredCalling();
1853  LOperand* value = UseRegister(instr->value());
1854  LOperand* temp = TempRegister();
1855 
1856  // Make sure that temp and result_temp are different registers.
1857  LUnallocated* result_temp = TempRegister();
1858  LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1859  return AssignPointerMap(Define(result, result_temp));
1860  } else if (to.IsSmi()) {
1861  LOperand* value = UseRegister(instr->value());
1862  return AssignEnvironment(
1863  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1864  } else {
1865  ASSERT(to.IsInteger32());
1866  LOperand* value = UseRegister(instr->value());
1867  LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value));
1868  if (!instr->CanTruncateToInt32()) {
1869  result = AssignEnvironment(result);
1870  }
1871  return result;
1872  }
1873  } else if (from.IsInteger32()) {
1874  info()->MarkAsDeferredCalling();
1875  if (to.IsTagged()) {
1876  HValue* val = instr->value();
1877  LOperand* value = UseRegister(val);
1878  if (!instr->CheckFlag(HValue::kCanOverflow)) {
1879  return DefineAsRegister(new(zone()) LSmiTag(value));
1880  } else if (val->CheckFlag(HInstruction::kUint32)) {
1881  LOperand* temp1 = TempRegister();
1882  LOperand* temp2 = FixedTemp(xmm1);
1883  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1884  return AssignPointerMap(DefineSameAsFirst(result));
1885  } else {
1886  LNumberTagI* result = new(zone()) LNumberTagI(value);
1887  return AssignPointerMap(DefineSameAsFirst(result));
1888  }
1889  } else if (to.IsSmi()) {
1890  HValue* val = instr->value();
1891  LOperand* value = UseRegister(val);
1892  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1893  if (instr->CheckFlag(HValue::kCanOverflow)) {
1894  ASSERT(val->CheckFlag(HValue::kUint32));
1895  result = AssignEnvironment(result);
1896  }
1897  return result;
1898  } else {
1899  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1900  LOperand* temp = FixedTemp(xmm1);
1901  return DefineAsRegister(
1902  new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
1903  } else {
1904  ASSERT(to.IsDouble());
1905  LOperand* value = Use(instr->value());
1906  return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
1907  }
1908  }
1909  }
1910  UNREACHABLE();
1911  return NULL;
1912 }
1913 
1914 
1915 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1916  LOperand* value = UseRegisterAtStart(instr->value());
1917  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1918 }
1919 
1920 
1921 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1922  LOperand* value = UseRegisterAtStart(instr->value());
1923  return AssignEnvironment(new(zone()) LCheckSmi(value));
1924 }
1925 
1926 
1927 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1928  LOperand* value = UseRegisterAtStart(instr->value());
1929  LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
1930  return AssignEnvironment(result);
1931 }
1932 
1933 
1934 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1935  LOperand* value = UseRegisterAtStart(instr->value());
1936  return AssignEnvironment(new(zone()) LCheckValue(value));
1937 }
1938 
1939 
1940 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1941  LOperand* value = NULL;
1942  if (!instr->CanOmitMapChecks()) {
1943  value = UseRegisterAtStart(instr->value());
1944  if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
1945  }
1946  LCheckMaps* result = new(zone()) LCheckMaps(value);
1947  if (!instr->CanOmitMapChecks()) {
1948  // Note: Only deopts in deferred code.
1949  AssignEnvironment(result);
1950  if (instr->has_migration_target()) return AssignPointerMap(result);
1951  }
1952  return result;
1953 }
1954 
1955 
1956 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1957  HValue* value = instr->value();
1958  Representation input_rep = value->representation();
1959  LOperand* reg = UseRegister(value);
1960  if (input_rep.IsDouble()) {
1961  return DefineAsRegister(new(zone()) LClampDToUint8(reg));
1962  } else if (input_rep.IsInteger32()) {
1963  return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
1964  } else {
1965  ASSERT(input_rep.IsSmiOrTagged());
1966  // The register allocator doesn't (yet) support allocation of double
1967  // temps, so reserve xmm1 explicitly.
1968  LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
1969  FixedTemp(xmm1));
1970  return AssignEnvironment(DefineSameAsFirst(result));
1971  }
1972 }
1973 
1974 
1975 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
1976  HValue* value = instr->value();
1977  ASSERT(value->representation().IsDouble());
1978  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
1979 }
1980 
1981 
1982 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
1983  LOperand* lo = UseRegister(instr->lo());
1984  LOperand* hi = UseRegister(instr->hi());
1985  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
1986 }
1987 
1988 
1989 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1990  LOperand* context = info()->IsStub() ? UseFixed(instr->context(), rsi) : NULL;
1991  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
1992  return new(zone()) LReturn(
1993  UseFixed(instr->value(), rax), context, parameter_count);
1994 }
1995 
1996 
1997 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1998  Representation r = instr->representation();
1999  if (r.IsSmi()) {
2000  return DefineAsRegister(new(zone()) LConstantS);
2001  } else if (r.IsInteger32()) {
2002  return DefineAsRegister(new(zone()) LConstantI);
2003  } else if (r.IsDouble()) {
2004  LOperand* temp = TempRegister();
2005  return DefineAsRegister(new(zone()) LConstantD(temp));
2006  } else if (r.IsExternal()) {
2007  return DefineAsRegister(new(zone()) LConstantE);
2008  } else if (r.IsTagged()) {
2009  return DefineAsRegister(new(zone()) LConstantT);
2010  } else {
2011  UNREACHABLE();
2012  return NULL;
2013  }
2014 }
2015 
2016 
2017 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2018  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2019  return instr->RequiresHoleCheck()
2020  ? AssignEnvironment(DefineAsRegister(result))
2021  : DefineAsRegister(result);
2022 }
2023 
2024 
2025 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2026  LOperand* context = UseFixed(instr->context(), rsi);
2027  LOperand* global_object = UseFixed(instr->global_object(), rax);
2028  LLoadGlobalGeneric* result =
2029  new(zone()) LLoadGlobalGeneric(context, global_object);
2030  return MarkAsCall(DefineFixed(result, rax), instr);
2031 }
2032 
2033 
2034 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2035  LOperand* value = UseRegister(instr->value());
2036  // Use a temp to avoid reloading the cell value address in the case where
2037  // we perform a hole check.
2038  return instr->RequiresHoleCheck()
2039  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2040  : new(zone()) LStoreGlobalCell(value, NULL);
2041 }
2042 
2043 
2044 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2045  LOperand* context = UseRegisterAtStart(instr->value());
2046  LInstruction* result =
2047  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2048  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2049  result = AssignEnvironment(result);
2050  }
2051  return result;
2052 }
2053 
2054 
2055 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2056  LOperand* context;
2057  LOperand* value;
2058  LOperand* temp;
2059  context = UseRegister(instr->context());
2060  if (instr->NeedsWriteBarrier()) {
2061  value = UseTempRegister(instr->value());
2062  temp = TempRegister();
2063  } else {
2064  value = UseRegister(instr->value());
2065  temp = NULL;
2066  }
2067  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2068  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2069  result = AssignEnvironment(result);
2070  }
2071  return result;
2072 }
2073 
2074 
2075 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2076  // Use the special mov rax, moffs64 encoding for external
2077  // memory accesses with 64-bit word-sized values.
2078  if (instr->access().IsExternalMemory() &&
2079  instr->access().offset() == 0 &&
2080  (instr->access().representation().IsSmi() ||
2081  instr->access().representation().IsTagged() ||
2082  instr->access().representation().IsHeapObject() ||
2083  instr->access().representation().IsExternal())) {
2084  LOperand* obj = UseRegisterOrConstantAtStart(instr->object());
2085  return DefineFixed(new(zone()) LLoadNamedField(obj), rax);
2086  }
2087  LOperand* obj = UseRegisterAtStart(instr->object());
2088  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2089 }
2090 
2091 
2092 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2093  LOperand* context = UseFixed(instr->context(), rsi);
2094  LOperand* object = UseFixed(instr->object(), rax);
2095  LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
2096  return MarkAsCall(DefineFixed(result, rax), instr);
2097 }
2098 
2099 
2100 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2101  HLoadFunctionPrototype* instr) {
2102  return AssignEnvironment(DefineAsRegister(
2103  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2104 }
2105 
2106 
2107 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2108  return DefineAsRegister(new(zone()) LLoadRoot);
2109 }
2110 
2111 
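// Keys whose constant offset was folded into the memory operand (array index
// dehoisting) are recorded here, including the inputs of key phis; on x64 the
// code generator uses this set (via MustSignExtendResult) to sign-extend the
// 32-bit key values to 64 bits before address arithmetic.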
2112 void LChunkBuilder::FindDehoistedKeyDefinitions(HValue* candidate) {
2113  BitVector* dehoisted_key_ids = chunk_->GetDehoistedKeyIds();
2114  if (dehoisted_key_ids->Contains(candidate->id())) return;
2115  dehoisted_key_ids->Add(candidate->id());
2116  if (!candidate->IsPhi()) return;
2117  for (int i = 0; i < candidate->OperandCount(); ++i) {
2118  FindDehoistedKeyDefinitions(candidate->OperandAt(i));
2119  }
2120 }
2121 
2122 
2123 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2124  ASSERT(instr->key()->representation().IsInteger32());
2125  ElementsKind elements_kind = instr->elements_kind();
2126  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2127  LInstruction* result = NULL;
2128 
2129  if (instr->IsDehoisted()) {
2130  FindDehoistedKeyDefinitions(instr->key());
2131  }
2132 
2133  if (!instr->is_typed_elements()) {
2134  LOperand* obj = UseRegisterAtStart(instr->elements());
2135  result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key));
2136  } else {
2137  ASSERT(
2138  (instr->representation().IsInteger32() &&
2139  !(IsDoubleOrFloatElementsKind(elements_kind))) ||
2140  (instr->representation().IsDouble() &&
2141  (IsDoubleOrFloatElementsKind(elements_kind))));
2142  LOperand* backing_store = UseRegister(instr->elements());
2143  result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key));
2144  }
2145 
2146  if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2147  // see LCodeGen::DoLoadKeyedExternalArray
2148  ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2149  elements_kind == UINT32_ELEMENTS) &&
2150  !instr->CheckFlag(HInstruction::kUint32)) :
2151  // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2152  // LCodeGen::DoLoadKeyedFixedArray
2153  instr->RequiresHoleCheck()) {
2154  result = AssignEnvironment(result);
2155  }
2156  return result;
2157 }
2158 
2159 
2160 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2161  LOperand* context = UseFixed(instr->context(), rsi);
2162  LOperand* object = UseFixed(instr->object(), rdx);
2163  LOperand* key = UseFixed(instr->key(), rax);
2164 
2165  LLoadKeyedGeneric* result =
2166  new(zone()) LLoadKeyedGeneric(context, object, key);
2167  return MarkAsCall(DefineFixed(result, rax), instr);
2168 }
2169 
2170 
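// For stores into tagged backing stores that need a write barrier, the
// elements, key and value are given writable registers (UseTempRegister)
// because the record-write code that runs after the store clobbers them.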
2171 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2172  ElementsKind elements_kind = instr->elements_kind();
2173 
2174  if (instr->IsDehoisted()) {
2175  FindDehoistedKeyDefinitions(instr->key());
2176  }
2177 
2178  if (!instr->is_typed_elements()) {
2179  ASSERT(instr->elements()->representation().IsTagged());
2180  bool needs_write_barrier = instr->NeedsWriteBarrier();
2181  LOperand* object = NULL;
2182  LOperand* key = NULL;
2183  LOperand* val = NULL;
2184 
2185  Representation value_representation = instr->value()->representation();
2186  if (value_representation.IsDouble()) {
2187  object = UseRegisterAtStart(instr->elements());
2188  val = UseRegisterAtStart(instr->value());
2189  key = UseRegisterOrConstantAtStart(instr->key());
2190  } else {
2191  ASSERT(value_representation.IsSmiOrTagged() ||
2192  value_representation.IsInteger32());
2193  if (needs_write_barrier) {
2194  object = UseTempRegister(instr->elements());
2195  val = UseTempRegister(instr->value());
2196  key = UseTempRegister(instr->key());
2197  } else {
2198  object = UseRegisterAtStart(instr->elements());
2199  val = UseRegisterOrConstantAtStart(instr->value());
2200  key = UseRegisterOrConstantAtStart(instr->key());
2201  }
2202  }
2203 
2204  return new(zone()) LStoreKeyed(object, key, val);
2205  }
2206 
2207  ASSERT(
2208  (instr->value()->representation().IsInteger32() &&
2209  !IsDoubleOrFloatElementsKind(elements_kind)) ||
2210  (instr->value()->representation().IsDouble() &&
2211  IsDoubleOrFloatElementsKind(elements_kind)));
2212  ASSERT((instr->is_fixed_typed_array() &&
2213  instr->elements()->representation().IsTagged()) ||
2214  (instr->is_external() &&
2215  instr->elements()->representation().IsExternal()));
2216  bool val_is_temp_register =
2217  elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2218  elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
2219  elements_kind == FLOAT32_ELEMENTS;
2220  LOperand* val = val_is_temp_register ? UseTempRegister(instr->value())
2221  : UseRegister(instr->value());
2222  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2223  LOperand* backing_store = UseRegister(instr->elements());
2224  return new(zone()) LStoreKeyed(backing_store, key, val);
2225 }
2226 
2227 
2228 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2229  LOperand* context = UseFixed(instr->context(), rsi);
2230  LOperand* object = UseFixed(instr->object(), rdx);
2231  LOperand* key = UseFixed(instr->key(), rcx);
2232  LOperand* value = UseFixed(instr->value(), rax);
2233 
2234  ASSERT(instr->object()->representation().IsTagged());
2235  ASSERT(instr->key()->representation().IsTagged());
2236  ASSERT(instr->value()->representation().IsTagged());
2237 
2238  LStoreKeyedGeneric* result =
2239  new(zone()) LStoreKeyedGeneric(context, object, key, value);
2240  return MarkAsCall(result, instr);
2241 }
2242 
2243 
2244 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2245  HTransitionElementsKind* instr) {
2246  LOperand* object = UseRegister(instr->object());
2247  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2248  LOperand* object = UseRegister(instr->object());
2249  LOperand* new_map_reg = TempRegister();
2250  LOperand* temp_reg = TempRegister();
2251  LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(
2252  object, NULL, new_map_reg, temp_reg);
2253  return result;
2254  } else {
2255  LOperand* context = UseFixed(instr->context(), rsi);
2256  LTransitionElementsKind* result =
2257  new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2258  return AssignPointerMap(result);
2259  }
2260 }
2261 
2262 
2263 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2264  HTrapAllocationMemento* instr) {
2265  LOperand* object = UseRegister(instr->object());
2266  LOperand* temp = TempRegister();
2267  LTrapAllocationMemento* result =
2268  new(zone()) LTrapAllocationMemento(object, temp);
2269  return AssignEnvironment(result);
2270 }
2271 
2272 
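// The policy for the stored value depends on how the store is generated:
// write-barrier stores need the value in a writable register, external-memory
// stores use the fixed rax/moffs64 encoding, and constants that cannot move
// (not allocated in new space) may be embedded directly in the instruction.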
2273 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2274  bool is_in_object = instr->access().IsInobject();
2275  bool is_external_location = instr->access().IsExternalMemory() &&
2276  instr->access().offset() == 0;
2277  bool needs_write_barrier = instr->NeedsWriteBarrier();
2278  bool needs_write_barrier_for_map = instr->has_transition() &&
2279  instr->NeedsWriteBarrierForMap();
2280 
2281  LOperand* obj;
2282  if (needs_write_barrier) {
2283  obj = is_in_object
2284  ? UseRegister(instr->object())
2285  : UseTempRegister(instr->object());
2286  } else if (is_external_location) {
2287  ASSERT(!is_in_object);
2288  ASSERT(!needs_write_barrier);
2289  ASSERT(!needs_write_barrier_for_map);
2290  obj = UseRegisterOrConstant(instr->object());
2291  } else {
2292  obj = needs_write_barrier_for_map
2293  ? UseRegister(instr->object())
2294  : UseRegisterAtStart(instr->object());
2295  }
2296 
2297  bool can_be_constant = instr->value()->IsConstant() &&
2298  HConstant::cast(instr->value())->NotInNewSpace() &&
2299  !instr->field_representation().IsDouble();
2300 
2301  LOperand* val;
2302  if (needs_write_barrier) {
2303  val = UseTempRegister(instr->value());
2304  } else if (is_external_location) {
2305  val = UseFixed(instr->value(), rax);
2306  } else if (can_be_constant) {
2307  val = UseRegisterOrConstant(instr->value());
2308  } else if (instr->field_representation().IsSmi()) {
2309  val = UseRegister(instr->value());
2310  } else if (instr->field_representation().IsDouble()) {
2311  val = UseRegisterAtStart(instr->value());
2312  } else {
2313  val = UseRegister(instr->value());
2314  }
2315 
2316  // We only need a scratch register if we have a write barrier or we
2317  // have a store into the properties array (i.e., not an in-object property).
2318  LOperand* temp = (!is_in_object || needs_write_barrier ||
2319  needs_write_barrier_for_map) ? TempRegister() : NULL;
2320 
2321  LInstruction* result = new(zone()) LStoreNamedField(obj, val, temp);
2322  if (!instr->access().IsExternalMemory() &&
2323  instr->field_representation().IsHeapObject() &&
2324  (val->IsConstantOperand()
2325  ? HConstant::cast(instr->value())->HasSmiValue()
2326  : !instr->value()->type().IsHeapObject())) {
2327  result = AssignEnvironment(result);
2328  }
2329  return result;
2330 }
2331 
2332 
2333 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2334  LOperand* context = UseFixed(instr->context(), rsi);
2335  LOperand* object = UseFixed(instr->object(), rdx);
2336  LOperand* value = UseFixed(instr->value(), rax);
2337 
2338  LStoreNamedGeneric* result =
2339  new(zone()) LStoreNamedGeneric(context, object, value);
2340  return MarkAsCall(result, instr);
2341 }
2342 
2343 
2344 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2345  LOperand* context = UseFixed(instr->context(), rsi);
2346  LOperand* left = UseFixed(instr->left(), rdx);
2347  LOperand* right = UseFixed(instr->right(), rax);
2348  return MarkAsCall(
2349  DefineFixed(new(zone()) LStringAdd(context, left, right), rax), instr);
2350 }
2351 
2352 
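// StringCharCodeAt and StringCharFromCode both have deferred slow paths that
// call into the runtime (e.g. for non-flat strings or non-cached characters),
// hence the pointer maps attached to their results.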
2353 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2354  LOperand* string = UseTempRegister(instr->string());
2355  LOperand* index = UseTempRegister(instr->index());
2356  LOperand* context = UseAny(instr->context());
2357  LStringCharCodeAt* result =
2358  new(zone()) LStringCharCodeAt(context, string, index);
2359  return AssignPointerMap(DefineAsRegister(result));
2360 }
2361 
2362 
2363 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2364  LOperand* char_code = UseRegister(instr->value());
2365  LOperand* context = UseAny(instr->context());
2366  LStringCharFromCode* result =
2367  new(zone()) LStringCharFromCode(context, char_code);
2368  return AssignPointerMap(DefineAsRegister(result));
2369 }
2370 
2371 
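// Allocation may fall through to a deferred runtime call when the inline
// bump-pointer allocation fails, hence MarkAsDeferredCalling and the pointer
// map on the result; a constant size is folded directly into the instruction.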
2372 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2373  info()->MarkAsDeferredCalling();
2374  LOperand* context = UseAny(instr->context());
2375  LOperand* size = instr->size()->IsConstant()
2376  ? UseConstant(instr->size())
2377  : UseTempRegister(instr->size());
2378  LOperand* temp = TempRegister();
2379  LAllocate* result = new(zone()) LAllocate(context, size, temp);
2380  return AssignPointerMap(DefineAsRegister(result));
2381 }
2382 
2383 
2384 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2385  LOperand* context = UseFixed(instr->context(), rsi);
2386  LRegExpLiteral* result = new(zone()) LRegExpLiteral(context);
2387  return MarkAsCall(DefineFixed(result, rax), instr);
2388 }
2389 
2390 
2391 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2392  LOperand* context = UseFixed(instr->context(), rsi);
2393  LFunctionLiteral* result = new(zone()) LFunctionLiteral(context);
2394  return MarkAsCall(DefineFixed(result, rax), instr);
2395 }
2396 
2397 
2398 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2399  ASSERT(argument_count_ == 0);
2400  allocator_->MarkAsOsrEntry();
2401  current_block_->last_environment()->set_ast_id(instr->ast_id());
2402  return AssignEnvironment(new(zone()) LOsrEntry);
2403 }
2404 
2405 
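// Ordinary stack parameters are defined as spill slots in the caller's frame;
// for code stubs the parameter lives in the fixed register dictated by the
// stub's interface descriptor.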
2406 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2407  LParameter* result = new(zone()) LParameter;
2408  if (instr->kind() == HParameter::STACK_PARAMETER) {
2409  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2410  return DefineAsSpilled(result, spill_index);
2411  } else {
2412  ASSERT(info()->IsStub());
2413  CodeStubInterfaceDescriptor* descriptor =
2414  info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2415  int index = static_cast<int>(instr->index());
2416  Register reg = descriptor->GetParameterRegister(index);
2417  return DefineFixed(result, reg);
2418  }
2419 }
2420 
2421 
2422 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2423  // Use an index that corresponds to the location in the unoptimized frame,
2424  // which the optimized frame will subsume.
2425  int env_index = instr->index();
2426  int spill_index = 0;
2427  if (instr->environment()->is_parameter_index(env_index)) {
2428  spill_index = chunk()->GetParameterStackSlot(env_index);
2429  } else {
2430  spill_index = env_index - instr->environment()->first_local_index();
2431  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2432  Abort(kTooManySpillSlotsNeededForOSR);
2433  spill_index = 0;
2434  }
2435  }
2436  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2437 }
2438 
2439 
2440 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2441  LOperand* context = UseFixed(instr->context(), rsi);
2442  LCallStub* result = new(zone()) LCallStub(context);
2443  return MarkAsCall(DefineFixed(result, rax), instr);
2444 }
2445 
2446 
2447 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2448  // There are no real uses of the arguments object.
2449  // arguments.length and element access are supported directly on
2450  // stack arguments, and any real arguments object use causes a bailout.
2451  // So this value is never used.
2452  return NULL;
2453 }
2454 
2455 
2456 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2457  instr->ReplayEnvironment(current_block_->last_environment());
2458 
2459  // There are no real uses of a captured object.
2460  return NULL;
2461 }
2462 
2463 
2464 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2465  info()->MarkAsRequiresFrame();
2466  LOperand* args = UseRegister(instr->arguments());
2467  LOperand* length;
2468  LOperand* index;
2469  if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2470  length = UseRegisterOrConstant(instr->length());
2471  index = UseOrConstant(instr->index());
2472  } else {
2473  length = UseTempRegister(instr->length());
2474  index = Use(instr->index());
2475  }
2476  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2477 }
2478 
2479 
2480 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2481  LOperand* object = UseFixed(instr->value(), rax);
2482  LToFastProperties* result = new(zone()) LToFastProperties(object);
2483  return MarkAsCall(DefineFixed(result, rax), instr);
2484 }
2485 
2486 
2487 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2488  LOperand* context = UseFixed(instr->context(), rsi);
2489  LOperand* value = UseAtStart(instr->value());
2490  LTypeof* result = new(zone()) LTypeof(context, value);
2491  return MarkAsCall(DefineFixed(result, rax), instr);
2492 }
2493 
2494 
2495 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2496  LInstruction* goto_instr = CheckElideControlInstruction(instr);
2497  if (goto_instr != NULL) return goto_instr;
2498 
2499  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2500 }
2501 
2502 
2503 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2504  HIsConstructCallAndBranch* instr) {
2505  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2506 }
2507 
2508 
2509 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2510  instr->ReplayEnvironment(current_block_->last_environment());
2511  return NULL;
2512 }
2513 
2514 
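// Function-entry stack checks call the stack-check stub directly, while
// back-edge checks only need an environment and a pointer map for the
// deferred runtime call taken when an interrupt is pending.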
2515 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2516  info()->MarkAsDeferredCalling();
2517  if (instr->is_function_entry()) {
2518  LOperand* context = UseFixed(instr->context(), rsi);
2519  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2520  } else {
2521  ASSERT(instr->is_backwards_branch());
2522  LOperand* context = UseAny(instr->context());
2523  return AssignEnvironment(
2524  AssignPointerMap(new(zone()) LStackCheck(context)));
2525  }
2526 }
2527 
2528 
2529 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2530  HEnvironment* outer = current_block_->last_environment();
2531  HConstant* undefined = graph()->GetConstantUndefined();
2532  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2533  instr->arguments_count(),
2534  instr->function(),
2535  undefined,
2536  instr->inlining_kind());
2537  // Only replay the binding of the arguments object if it wasn't removed from the graph.
2538  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2539  inner->Bind(instr->arguments_var(), instr->arguments_object());
2540  }
2541  inner->set_entry(instr);
2542  current_block_->UpdateEnvironment(inner);
2543  chunk_->AddInlinedClosure(instr->closure());
2544  return NULL;
2545 }
2546 
2547 
2548 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2549  LInstruction* pop = NULL;
2550 
2551  HEnvironment* env = current_block_->last_environment();
2552 
2553  if (env->entry()->arguments_pushed()) {
2554  int argument_count = env->arguments_environment()->parameter_count();
2555  pop = new(zone()) LDrop(argument_count);
2556  ASSERT(instr->argument_delta() == -argument_count);
2557  }
2558 
2559  HEnvironment* outer = current_block_->last_environment()->
2560  DiscardInlined(false);
2561  current_block_->UpdateEnvironment(outer);
2562 
2563  return pop;
2564 }
2565 
2566 
2567 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2568  LOperand* context = UseFixed(instr->context(), rsi);
2569  LOperand* object = UseFixed(instr->enumerable(), rax);
2570  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2571  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
2572 }
2573 
2574 
2575 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2576  LOperand* map = UseRegister(instr->map());
2577  return AssignEnvironment(DefineAsRegister(
2578  new(zone()) LForInCacheArray(map)));
2579 }
2580 
2581 
2582 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2583  LOperand* value = UseRegisterAtStart(instr->value());
2584  LOperand* map = UseRegisterAtStart(instr->map());
2585  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2586 }
2587 
2588 
2589 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2590  LOperand* object = UseRegister(instr->object());
2591  LOperand* index = UseTempRegister(instr->index());
2592  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2593 }
2594 
2595 
2596 } } // namespace v8::internal
2597 
2598 #endif // V8_TARGET_ARCH_X64
Definition: lithium.cc:55