v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
lithium-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "lithium-allocator-inl.h"
33 #include "ia32/lithium-ia32.h"
34 #include "ia32/lithium-codegen-ia32.h"
35 #include "hydrogen-osr.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 #define DEFINE_COMPILE(type) \
41  void L##type::CompileToNative(LCodeGen* generator) { \
42  generator->Do##type(this); \
43  }
44 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
45 #undef DEFINE_COMPILE
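// [Illustrative note, not part of the original source] DEFINE_COMPILE is applied to every
// concrete lithium instruction via LITHIUM_CONCRETE_INSTRUCTION_LIST; each expansion is a
// one-line dispatch into the code generator. For example, DEFINE_COMPILE(Goto) expands to:
//
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }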
46 
47 
48 #ifdef DEBUG
49 void LInstruction::VerifyCall() {
50  // Call instructions can use only fixed registers as temporaries and
51  // outputs because all registers are blocked by the calling convention.
52  // Input operands must use a fixed register or use-at-start policy or
53  // a non-register policy.
54  ASSERT(Output() == NULL ||
55  LUnallocated::cast(Output())->HasFixedPolicy() ||
56  !LUnallocated::cast(Output())->HasRegisterPolicy());
57  for (UseIterator it(this); !it.Done(); it.Advance()) {
58  LUnallocated* operand = LUnallocated::cast(it.Current());
59  ASSERT(operand->HasFixedPolicy() ||
60  operand->IsUsedAtStart());
61  }
62  for (TempIterator it(this); !it.Done(); it.Advance()) {
63  LUnallocated* operand = LUnallocated::cast(it.Current());
64  ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
65  }
66 }
67 #endif
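// [Illustrative note, not part of the original source] As a concrete example of what VerifyCall
// accepts: LCallJSFunction (see DoCallJSFunction below) is built with its function input fixed
// to edi via UseFixed and its result fixed to eax via DefineFixed, so every register operand of
// the call carries a fixed policy.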
68 
69 
70 bool LInstruction::HasDoubleRegisterResult() {
71  return HasResult() && result()->IsDoubleRegister();
72 }
73 
74 
75 bool LInstruction::HasDoubleRegisterInput() {
76  for (int i = 0; i < InputCount(); i++) {
77  LOperand* op = InputAt(i);
78  if (op != NULL && op->IsDoubleRegister()) {
79  return true;
80  }
81  }
82  return false;
83 }
84 
85 
86 bool LInstruction::IsDoubleInput(X87Register reg, LCodeGen* cgen) {
87  for (int i = 0; i < InputCount(); i++) {
88  LOperand* op = InputAt(i);
89  if (op != NULL && op->IsDoubleRegister()) {
90  if (cgen->ToX87Register(op).is(reg)) return true;
91  }
92  }
93  return false;
94 }
95 
96 
97 void LInstruction::PrintTo(StringStream* stream) {
98  stream->Add("%s ", this->Mnemonic());
99 
100  PrintOutputOperandTo(stream);
101 
102  PrintDataTo(stream);
103 
104  if (HasEnvironment()) {
105  stream->Add(" ");
106  environment()->PrintTo(stream);
107  }
108 
109  if (HasPointerMap()) {
110  stream->Add(" ");
111  pointer_map()->PrintTo(stream);
112  }
113 }
114 
115 
116 void LInstruction::PrintDataTo(StringStream* stream) {
117  stream->Add("= ");
118  for (int i = 0; i < InputCount(); i++) {
119  if (i > 0) stream->Add(" ");
120  if (InputAt(i) == NULL) {
121  stream->Add("NULL");
122  } else {
123  InputAt(i)->PrintTo(stream);
124  }
125  }
126 }
127 
128 
129 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
130  if (HasResult()) result()->PrintTo(stream);
131 }
132 
133 
134 void LLabel::PrintDataTo(StringStream* stream) {
135  LGap::PrintDataTo(stream);
136  LLabel* rep = replacement();
137  if (rep != NULL) {
138  stream->Add(" Dead block replaced with B%d", rep->block_id());
139  }
140 }
141 
142 
143 bool LGap::IsRedundant() const {
144  for (int i = 0; i < 4; i++) {
145  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
146  return false;
147  }
148  }
149 
150  return true;
151 }
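// [Illustrative note, not part of the original source] parallel_moves_ has one slot per inner
// position of a gap (BEFORE, START, END and AFTER in the LGap declaration, hence the constant 4).
// The gap as a whole is redundant only when every parallel move that is present is itself
// redundant, i.e. would emit no code.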
152 
153 
154 void LGap::PrintDataTo(StringStream* stream) {
155  for (int i = 0; i < 4; i++) {
156  stream->Add("(");
157  if (parallel_moves_[i] != NULL) {
158  parallel_moves_[i]->PrintDataTo(stream);
159  }
160  stream->Add(") ");
161  }
162 }
163 
164 
165 const char* LArithmeticD::Mnemonic() const {
166  switch (op()) {
167  case Token::ADD: return "add-d";
168  case Token::SUB: return "sub-d";
169  case Token::MUL: return "mul-d";
170  case Token::DIV: return "div-d";
171  case Token::MOD: return "mod-d";
172  default:
173  UNREACHABLE();
174  return NULL;
175  }
176 }
177 
178 
179 const char* LArithmeticT::Mnemonic() const {
180  switch (op()) {
181  case Token::ADD: return "add-t";
182  case Token::SUB: return "sub-t";
183  case Token::MUL: return "mul-t";
184  case Token::MOD: return "mod-t";
185  case Token::DIV: return "div-t";
186  case Token::BIT_AND: return "bit-and-t";
187  case Token::BIT_OR: return "bit-or-t";
188  case Token::BIT_XOR: return "bit-xor-t";
189  case Token::ROR: return "ror-t";
190  case Token::SHL: return "sal-t";
191  case Token::SAR: return "sar-t";
192  case Token::SHR: return "shr-t";
193  default:
194  UNREACHABLE();
195  return NULL;
196  }
197 }
198 
199 
200 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
201  return !gen->IsNextEmittedBlock(block_id());
202 }
203 
204 
205 void LGoto::PrintDataTo(StringStream* stream) {
206  stream->Add("B%d", block_id());
207 }
208 
209 
210 void LBranch::PrintDataTo(StringStream* stream) {
211  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
212  value()->PrintTo(stream);
213 }
214 
215 
216 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
217  stream->Add("if ");
218  left()->PrintTo(stream);
219  stream->Add(" %s ", Token::String(op()));
220  right()->PrintTo(stream);
221  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
222 }
223 
224 
225 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
226  stream->Add("if is_object(");
227  value()->PrintTo(stream);
228  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
229 }
230 
231 
232 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
233  stream->Add("if is_string(");
234  value()->PrintTo(stream);
235  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
236 }
237 
238 
239 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
240  stream->Add("if is_smi(");
241  value()->PrintTo(stream);
242  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
243 }
244 
245 
246 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
247  stream->Add("if is_undetectable(");
248  value()->PrintTo(stream);
249  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
250 }
251 
252 
253 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
254  stream->Add("if string_compare(");
255  left()->PrintTo(stream);
256  right()->PrintTo(stream);
257  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
258 }
259 
260 
261 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
262  stream->Add("if has_instance_type(");
263  value()->PrintTo(stream);
264  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
265 }
266 
267 
268 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
269  stream->Add("if has_cached_array_index(");
270  value()->PrintTo(stream);
271  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
272 }
273 
274 
275 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
276  stream->Add("if class_of_test(");
277  value()->PrintTo(stream);
278  stream->Add(", \"%o\") then B%d else B%d",
279  *hydrogen()->class_name(),
280  true_block_id(),
281  false_block_id());
282 }
283 
284 
285 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
286  stream->Add("if typeof ");
287  value()->PrintTo(stream);
288  stream->Add(" == \"%s\" then B%d else B%d",
289  hydrogen()->type_literal()->ToCString().get(),
290  true_block_id(), false_block_id());
291 }
292 
293 
294 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
295  stream->Add(" = ");
296  function()->PrintTo(stream);
297  stream->Add(".code_entry = ");
298  code_object()->PrintTo(stream);
299 }
300 
301 
302 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
303  stream->Add(" = ");
304  base_object()->PrintTo(stream);
305  stream->Add(" + ");
306  offset()->PrintTo(stream);
307 }
308 
309 
310 void LCallJSFunction::PrintDataTo(StringStream* stream) {
311  stream->Add("= ");
312  function()->PrintTo(stream);
313  stream->Add("#%d / ", arity());
314 }
315 
316 
317 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
318  for (int i = 0; i < InputCount(); i++) {
319  InputAt(i)->PrintTo(stream);
320  stream->Add(" ");
321  }
322  stream->Add("#%d / ", arity());
323 }
324 
325 
326 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
327  context()->PrintTo(stream);
328  stream->Add("[%d]", slot_index());
329 }
330 
331 
332 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
333  context()->PrintTo(stream);
334  stream->Add("[%d] <- ", slot_index());
335  value()->PrintTo(stream);
336 }
337 
338 
339 void LInvokeFunction::PrintDataTo(StringStream* stream) {
340  stream->Add("= ");
341  context()->PrintTo(stream);
342  stream->Add(" ");
343  function()->PrintTo(stream);
344  stream->Add(" #%d / ", arity());
345 }
346 
347 
348 void LCallNew::PrintDataTo(StringStream* stream) {
349  stream->Add("= ");
350  context()->PrintTo(stream);
351  stream->Add(" ");
352  constructor()->PrintTo(stream);
353  stream->Add(" #%d / ", arity());
354 }
355 
356 
357 void LCallNewArray::PrintDataTo(StringStream* stream) {
358  stream->Add("= ");
359  context()->PrintTo(stream);
360  stream->Add(" ");
361  constructor()->PrintTo(stream);
362  stream->Add(" #%d / ", arity());
363  ElementsKind kind = hydrogen()->elements_kind();
364  stream->Add(" (%s) ", ElementsKindToString(kind));
365 }
366 
367 
368 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
369  arguments()->PrintTo(stream);
370 
371  stream->Add(" length ");
372  length()->PrintTo(stream);
373 
374  stream->Add(" index ");
375  index()->PrintTo(stream);
376 }
377 
378 
379 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
380  // Reserve an extra slot and force an odd index for a double-width slot.
381  if (kind == DOUBLE_REGISTERS) {
382  spill_slot_count_++;
383  spill_slot_count_ |= 1;
384  num_double_slots_++;
385  }
386  return spill_slot_count_++;
387 }
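// [Illustrative note, not part of the original source] Worked example of the arithmetic above,
// assuming spill_slot_count_ == 4 on entry and kind == DOUBLE_REGISTERS: the increment makes the
// counter 5, OR-ing with 1 leaves it at 5, the function returns 5, and the trailing
// post-increment leaves the counter at 6. Index 4 is the skipped alignment slot and the
// double-width value is addressed through the odd index 5; an odd counter on entry skips one
// additional slot.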
388 
389 
390 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
391  int index = GetNextSpillIndex(kind);
392  if (kind == DOUBLE_REGISTERS) {
393  return LDoubleStackSlot::Create(index, zone());
394  } else {
395  ASSERT(kind == GENERAL_REGISTERS);
396  return LStackSlot::Create(index, zone());
397  }
398 }
399 
400 
401 void LStoreNamedField::PrintDataTo(StringStream* stream) {
402  object()->PrintTo(stream);
403  hydrogen()->access().PrintTo(stream);
404  stream->Add(" <- ");
405  value()->PrintTo(stream);
406 }
407 
408 
409 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
410  object()->PrintTo(stream);
411  stream->Add(".");
412  stream->Add(String::cast(*name())->ToCString().get());
413  stream->Add(" <- ");
414  value()->PrintTo(stream);
415 }
416 
417 
418 void LLoadKeyed::PrintDataTo(StringStream* stream) {
419  elements()->PrintTo(stream);
420  stream->Add("[");
421  key()->PrintTo(stream);
422  if (hydrogen()->IsDehoisted()) {
423  stream->Add(" + %d]", additional_index());
424  } else {
425  stream->Add("]");
426  }
427 }
428 
429 
430 void LStoreKeyed::PrintDataTo(StringStream* stream) {
431  elements()->PrintTo(stream);
432  stream->Add("[");
433  key()->PrintTo(stream);
434  if (hydrogen()->IsDehoisted()) {
435  stream->Add(" + %d] <-", additional_index());
436  } else {
437  stream->Add("] <- ");
438  }
439 
440  if (value() == NULL) {
441  ASSERT(hydrogen()->IsConstantHoleStore() &&
442  hydrogen()->value()->representation().IsDouble());
443  stream->Add("<the hole(nan)>");
444  } else {
445  value()->PrintTo(stream);
446  }
447 }
448 
449 
450 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
451  object()->PrintTo(stream);
452  stream->Add("[");
453  key()->PrintTo(stream);
454  stream->Add("] <- ");
455  value()->PrintTo(stream);
456 }
457 
458 
459 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
460  object()->PrintTo(stream);
461  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
462 }
463 
464 
465 LPlatformChunk* LChunkBuilder::Build() {
466  ASSERT(is_unused());
467  chunk_ = new(zone()) LPlatformChunk(info(), graph());
468  LPhase phase("L_Building chunk", chunk_);
469  status_ = BUILDING;
470 
471  // Reserve the first spill slot for the state of dynamic alignment.
472  if (info()->IsOptimizing()) {
473  int alignment_state_index = chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
474  ASSERT_EQ(alignment_state_index, 0);
475  USE(alignment_state_index);
476  }
477 
478  // If compiling for OSR, reserve space for the unoptimized frame,
479  // which will be subsumed into this frame.
480  if (graph()->has_osr()) {
481  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
482  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
483  }
484  }
485 
486  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
487  for (int i = 0; i < blocks->length(); i++) {
488  HBasicBlock* next = NULL;
489  if (i < blocks->length() - 1) next = blocks->at(i + 1);
490  DoBasicBlock(blocks->at(i), next);
491  if (is_aborted()) return NULL;
492  }
493  status_ = DONE;
494  return chunk_;
495 }
496 
497 
498 void LChunkBuilder::Abort(BailoutReason reason) {
499  info()->set_bailout_reason(reason);
500  status_ = ABORTED;
501 }
502 
503 
504 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
505  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
506  Register::ToAllocationIndex(reg));
507 }
508 
509 
510 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
511  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
512  XMMRegister::ToAllocationIndex(reg));
513 }
514 
515 
516 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
517  return Use(value, ToUnallocated(fixed_register));
518 }
519 
520 
521 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
522  return Use(value, ToUnallocated(reg));
523 }
524 
525 
526 LOperand* LChunkBuilder::UseRegister(HValue* value) {
527  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
528 }
529 
530 
531 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
532  return Use(value,
533  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
534  LUnallocated::USED_AT_START));
535 }
536 
537 
538 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
539  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
540 }
541 
542 
543 LOperand* LChunkBuilder::Use(HValue* value) {
544  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
545 }
546 
547 
548 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
549  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
550  LUnallocated::USED_AT_START));
551 }
552 
553 
554 static inline bool CanBeImmediateConstant(HValue* value) {
555  return value->IsConstant() && HConstant::cast(value)->NotInNewSpace();
556 }
557 
558 
559 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
560  return CanBeImmediateConstant(value)
561  ? chunk_->DefineConstantOperand(HConstant::cast(value))
562  : Use(value);
563 }
564 
565 
566 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
567  return CanBeImmediateConstant(value)
568  ? chunk_->DefineConstantOperand(HConstant::cast(value))
569  : UseAtStart(value);
570 }
571 
572 
573 LOperand* LChunkBuilder::UseFixedOrConstant(HValue* value,
574  Register fixed_register) {
575  return CanBeImmediateConstant(value)
576  ? chunk_->DefineConstantOperand(HConstant::cast(value))
577  : UseFixed(value, fixed_register);
578 }
579 
580 
581 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
582  return CanBeImmediateConstant(value)
583  ? chunk_->DefineConstantOperand(HConstant::cast(value))
584  : UseRegister(value);
585 }
586 
587 
588 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
589  return CanBeImmediateConstant(value)
590  ? chunk_->DefineConstantOperand(HConstant::cast(value))
591  : UseRegisterAtStart(value);
592 }
593 
594 
595 LOperand* LChunkBuilder::UseConstant(HValue* value) {
596  return chunk_->DefineConstantOperand(HConstant::cast(value));
597 }
598 
599 
600 LOperand* LChunkBuilder::UseAny(HValue* value) {
601  return value->IsConstant()
602  ? chunk_->DefineConstantOperand(HConstant::cast(value))
603  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
604 }
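// [Illustrative note, not part of the original source] Taken together, the helpers above express
// the operand policies handed to the register allocator: UseFixed/UseFixedDouble pin a value to a
// specific register, UseRegister requires some register, UseTempRegister requires a register the
// instruction may clobber, Use/UseAtStart accept any location, the *OrConstant variants allow an
// immediate when the value is a constant outside new space, UseConstant always emits an immediate,
// and UseAny lets the allocator pick any location for non-constants. The "AtStart" variants mark
// the input as read only at the start of the instruction, so its register may be reused for the
// result.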
605 
606 
607 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
608  if (value->EmitAtUses()) {
609  HInstruction* instr = HInstruction::cast(value);
610  VisitInstruction(instr);
611  }
612  operand->set_virtual_register(value->id());
613  return operand;
614 }
615 
616 
617 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
618  LUnallocated* result) {
619  result->set_virtual_register(current_instruction_->id());
620  instr->set_result(result);
621  return instr;
622 }
623 
624 
625 LInstruction* LChunkBuilder::DefineAsRegister(
626  LTemplateResultInstruction<1>* instr) {
627  return Define(instr,
628  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
629 }
630 
631 
632 LInstruction* LChunkBuilder::DefineAsSpilled(
633  LTemplateResultInstruction<1>* instr,
634  int index) {
635  return Define(instr,
636  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
637 }
638 
639 
640 LInstruction* LChunkBuilder::DefineSameAsFirst(
641  LTemplateResultInstruction<1>* instr) {
642  return Define(instr,
643  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
644 }
645 
646 
647 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
648  Register reg) {
649  return Define(instr, ToUnallocated(reg));
650 }
651 
652 
653 LInstruction* LChunkBuilder::DefineFixedDouble(
654  LTemplateResultInstruction<1>* instr,
655  XMMRegister reg) {
656  return Define(instr, ToUnallocated(reg));
657 }
658 
659 
660 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
661  HEnvironment* hydrogen_env = current_block_->last_environment();
662  int argument_index_accumulator = 0;
663  ZoneList<HValue*> objects_to_materialize(0, zone());
664  instr->set_environment(CreateEnvironment(hydrogen_env,
665  &argument_index_accumulator,
666  &objects_to_materialize));
667  return instr;
668 }
669 
670 
671 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
672  HInstruction* hinstr,
673  CanDeoptimize can_deoptimize) {
674  info()->MarkAsNonDeferredCalling();
675 
676 #ifdef DEBUG
677  instr->VerifyCall();
678 #endif
679  instr->MarkAsCall();
680  instr = AssignPointerMap(instr);
681 
682  // If the instruction does not have side effects, lazy deoptimization
683  // after the call will try to deoptimize to the point before the call.
684  // Thus we still need to attach an environment to this call even if
685  // the call sequence cannot deoptimize eagerly.
686  bool needs_environment =
687  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
688  !hinstr->HasObservableSideEffects();
689  if (needs_environment && !instr->HasEnvironment()) {
690  instr = AssignEnvironment(instr);
691  }
692 
693  return instr;
694 }
695 
696 
697 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
698  ASSERT(!instr->HasPointerMap());
699  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
700  return instr;
701 }
702 
703 
704 LUnallocated* LChunkBuilder::TempRegister() {
705  LUnallocated* operand =
706  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
707  int vreg = allocator_->GetVirtualRegister();
708  if (!allocator_->AllocationOk()) {
709  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
710  vreg = 0;
711  }
712  operand->set_virtual_register(vreg);
713  return operand;
714 }
715 
716 
717 LOperand* LChunkBuilder::FixedTemp(Register reg) {
718  LUnallocated* operand = ToUnallocated(reg);
719  ASSERT(operand->HasFixedPolicy());
720  return operand;
721 }
722 
723 
724 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
725  LUnallocated* operand = ToUnallocated(reg);
726  ASSERT(operand->HasFixedPolicy());
727  return operand;
728 }
729 
730 
731 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
732  return new(zone()) LLabel(instr->block());
733 }
734 
735 
736 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
737  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
738 }
739 
740 
741 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
742  UNREACHABLE();
743  return NULL;
744 }
745 
746 
747 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
748  return AssignEnvironment(new(zone()) LDeoptimize);
749 }
750 
751 
752 LInstruction* LChunkBuilder::DoShift(Token::Value op,
753  HBitwiseBinaryOperation* instr) {
754  if (instr->representation().IsSmiOrInteger32()) {
755  ASSERT(instr->left()->representation().Equals(instr->representation()));
756  ASSERT(instr->right()->representation().Equals(instr->representation()));
757  LOperand* left = UseRegisterAtStart(instr->left());
758 
759  HValue* right_value = instr->right();
760  LOperand* right = NULL;
761  int constant_value = 0;
762  bool does_deopt = false;
763  if (right_value->IsConstant()) {
764  HConstant* constant = HConstant::cast(right_value);
765  right = chunk_->DefineConstantOperand(constant);
766  constant_value = constant->Integer32Value() & 0x1f;
767  // Left shifts can deoptimize if we shift by > 0 and the result cannot be
768  // truncated to smi.
769  if (instr->representation().IsSmi() && constant_value > 0) {
770  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
771  }
772  } else {
773  right = UseFixed(right_value, ecx);
774  }
775 
776  // Shift operations can only deoptimize if we do a logical shift by 0 and
777  // the result cannot be truncated to int32.
778  if (op == Token::SHR && constant_value == 0) {
779  if (FLAG_opt_safe_uint32_operations) {
780  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
781  } else {
782  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
783  }
784  }
785 
786  LInstruction* result =
787  DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
788  return does_deopt ? AssignEnvironment(result) : result;
789  } else {
790  return DoArithmeticT(op, instr);
791  }
792 }
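// [Illustrative note, not part of the original source] Example of the "logical shift by 0" case
// above: for the JavaScript expression x >>> 0 the shift amount is the constant 0, so the result
// is x reinterpreted as an unsigned 32-bit value (e.g. -1 becomes 4294967295), which may not fit
// in an int32. Unless every use truncates the result back to int32 (or, when
// FLAG_opt_safe_uint32_operations is on, the instruction itself is marked kUint32), the builder
// attaches an environment so the shift can deoptimize.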
793 
794 
795 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
796  HArithmeticBinaryOperation* instr) {
797  ASSERT(instr->representation().IsDouble());
798  ASSERT(instr->left()->representation().IsDouble());
799  ASSERT(instr->right()->representation().IsDouble());
800  if (op == Token::MOD) {
801  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
802  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
803  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
804  return MarkAsCall(DefineSameAsFirst(result), instr);
805  } else {
806  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
807  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
808  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
809  return DefineSameAsFirst(result);
810  }
811 }
812 
813 
814 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
815  HBinaryOperation* instr) {
816  HValue* left = instr->left();
817  HValue* right = instr->right();
818  ASSERT(left->representation().IsTagged());
819  ASSERT(right->representation().IsTagged());
820  LOperand* context = UseFixed(instr->context(), esi);
821  LOperand* left_operand = UseFixed(left, edx);
822  LOperand* right_operand = UseFixed(right, eax);
823  LArithmeticT* result =
824  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
825  return MarkAsCall(DefineFixed(result, eax), instr);
826 }
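// [Illustrative note, not part of the original source] Generic (tagged) arithmetic is lowered to
// a call: the context is pinned to esi and the operands to edx and eax, which appears to be the
// register convention of the ia32 binary-op IC, with the tagged result defined fixed in eax.
// MarkAsCall then records the pointer map and, when required, the environment used for lazy
// deoptimization after the call.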
827 
828 
829 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
830  ASSERT(is_building());
831  current_block_ = block;
832  next_block_ = next_block;
833  if (block->IsStartBlock()) {
834  block->UpdateEnvironment(graph_->start_environment());
835  argument_count_ = 0;
836  } else if (block->predecessors()->length() == 1) {
837  // We have a single predecessor => copy environment and outgoing
838  // argument count from the predecessor.
839  ASSERT(block->phis()->length() == 0);
840  HBasicBlock* pred = block->predecessors()->at(0);
841  HEnvironment* last_environment = pred->last_environment();
842  ASSERT(last_environment != NULL);
843  // Only copy the environment if it is used again later.
844  if (pred->end()->SecondSuccessor() == NULL) {
845  ASSERT(pred->end()->FirstSuccessor() == block);
846  } else {
847  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
848  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
849  last_environment = last_environment->Copy();
850  }
851  }
852  block->UpdateEnvironment(last_environment);
853  ASSERT(pred->argument_count() >= 0);
854  argument_count_ = pred->argument_count();
855  } else {
856  // We are at a state join => process phis.
857  HBasicBlock* pred = block->predecessors()->at(0);
858  // No need to copy the environment; it cannot be used later.
859  HEnvironment* last_environment = pred->last_environment();
860  for (int i = 0; i < block->phis()->length(); ++i) {
861  HPhi* phi = block->phis()->at(i);
862  if (phi->HasMergedIndex()) {
863  last_environment->SetValueAt(phi->merged_index(), phi);
864  }
865  }
866  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
867  if (block->deleted_phis()->at(i) < last_environment->length()) {
868  last_environment->SetValueAt(block->deleted_phis()->at(i),
869  graph_->GetConstantUndefined());
870  }
871  }
872  block->UpdateEnvironment(last_environment);
873  // Pick up the outgoing argument count of one of the predecessors.
874  argument_count_ = pred->argument_count();
875  }
876  HInstruction* current = block->first();
877  int start = chunk_->instructions()->length();
878  while (current != NULL && !is_aborted()) {
879  // Code for constants in registers is generated lazily.
880  if (!current->EmitAtUses()) {
881  VisitInstruction(current);
882  }
883  current = current->next();
884  }
885  int end = chunk_->instructions()->length() - 1;
886  if (end >= start) {
887  block->set_first_instruction_index(start);
888  block->set_last_instruction_index(end);
889  }
890  block->set_argument_count(argument_count_);
891  next_block_ = NULL;
892  current_block_ = NULL;
893 }
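// [Illustrative note, not part of the original source] On the single-predecessor path above, the
// predecessor's environment is copied only when its branch has two successors and at least one of
// them has a block id higher than this block's, i.e. a successor that has not been processed yet
// and will still need the environment in its current state.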
894 
895 
896 void LChunkBuilder::VisitInstruction(HInstruction* current) {
897  HInstruction* old_current = current_instruction_;
898  current_instruction_ = current;
899 
900  LInstruction* instr = NULL;
901  if (current->CanReplaceWithDummyUses()) {
902  if (current->OperandCount() == 0) {
903  instr = DefineAsRegister(new(zone()) LDummy());
904  } else {
905  ASSERT(!current->OperandAt(0)->IsControlInstruction());
906  instr = DefineAsRegister(new(zone())
907  LDummyUse(UseAny(current->OperandAt(0))));
908  }
909  for (int i = 1; i < current->OperandCount(); ++i) {
910  if (current->OperandAt(i)->IsControlInstruction()) continue;
911  LInstruction* dummy =
912  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
913  dummy->set_hydrogen_value(current);
914  chunk_->AddInstruction(dummy, current_block_);
915  }
916  } else {
917  instr = current->CompileToLithium(this);
918  }
919 
920  argument_count_ += current->argument_delta();
921  ASSERT(argument_count_ >= 0);
922 
923  if (instr != NULL) {
924  // Associate the hydrogen instruction first, since we may need it for
925  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
926  instr->set_hydrogen_value(current);
927 
928 #if DEBUG
929  // Make sure that the lithium instruction has either no fixed register
930  // constraints in temps or the result OR no uses that are only used at
931  // start. If this invariant doesn't hold, the register allocator can decide
932  // to insert a split of a range immediately before the instruction due to an
933  // already allocated register needing to be used for the instruction's fixed
934  // register constraint. In this case, the register allocator won't see an
935  // interference between the split child and the use-at-start (it would if
936  // it was just a plain use), so it is free to move the split child into
937  // the same register that is used for the use-at-start.
938  // See https://code.google.com/p/chromium/issues/detail?id=201590
939  if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
940  int fixed = 0;
941  int used_at_start = 0;
942  for (UseIterator it(instr); !it.Done(); it.Advance()) {
943  LUnallocated* operand = LUnallocated::cast(it.Current());
944  if (operand->IsUsedAtStart()) ++used_at_start;
945  }
946  if (instr->Output() != NULL) {
947  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
948  }
949  for (TempIterator it(instr); !it.Done(); it.Advance()) {
950  LUnallocated* operand = LUnallocated::cast(it.Current());
951  if (operand->HasFixedPolicy()) ++fixed;
952  }
953  ASSERT(fixed == 0 || used_at_start == 0);
954  }
955 #endif
956 
957  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
958  instr = AssignPointerMap(instr);
959  }
960  if (FLAG_stress_environments && !instr->HasEnvironment()) {
961  instr = AssignEnvironment(instr);
962  }
963  if (!CpuFeatures::IsSafeForSnapshot(SSE2) && instr->IsGoto() &&
964  LGoto::cast(instr)->jumps_to_join()) {
965  // TODO(olivf) Since phis of spilled values are joined as registers
966  // (not in the stack slot), we need to allow the goto gaps to keep one
967  // x87 register alive. To ensure all other values are still spilled, we
968  // insert an fpu register barrier right before.
969  LClobberDoubles* clobber = new(zone()) LClobberDoubles();
970  clobber->set_hydrogen_value(current);
971  chunk_->AddInstruction(clobber, current_block_);
972  }
973  chunk_->AddInstruction(instr, current_block_);
974 
975  if (instr->IsCall()) {
976  HValue* hydrogen_value_for_lazy_bailout = current;
977  LInstruction* instruction_needing_environment = NULL;
978  if (current->HasObservableSideEffects()) {
979  HSimulate* sim = HSimulate::cast(current->next());
980  instruction_needing_environment = instr;
981  sim->ReplayEnvironment(current_block_->last_environment());
982  hydrogen_value_for_lazy_bailout = sim;
983  }
984  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
985  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
986  chunk_->AddInstruction(bailout, current_block_);
987  if (instruction_needing_environment != NULL) {
988  // Store the lazy deopt environment with the instruction if needed.
989  // Right now it is only used for LInstanceOfKnownGlobal.
990  instruction_needing_environment->
991  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
992  }
993  }
994  }
995  current_instruction_ = old_current;
996 }
997 
998 
999 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1000  return new(zone()) LGoto(instr->FirstSuccessor());
1001 }
1002 
1003 
1004 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1005  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1006  if (goto_instr != NULL) return goto_instr;
1007 
1008  HValue* value = instr->value();
1009  Representation r = value->representation();
1010  HType type = value->type();
1011  ToBooleanStub::Types expected = instr->expected_input_types();
1012  if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
1013 
1014  bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
1015  type.IsJSArray() || type.IsHeapNumber() || type.IsString();
1016  LOperand* temp = !easy_case && expected.NeedsMap() ? TempRegister() : NULL;
1017  LInstruction* branch = new(zone()) LBranch(UseRegister(value), temp);
1018  if (!easy_case &&
1019  ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
1020  !expected.IsGeneric())) {
1021  branch = AssignEnvironment(branch);
1022  }
1023  return branch;
1024 }
1025 
1026 
1027 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
1028  return new(zone()) LDebugBreak();
1029 }
1030 
1031 
1032 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1033  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1034  if (goto_instr != NULL) return goto_instr;
1035 
1036  ASSERT(instr->value()->representation().IsTagged());
1037  LOperand* value = UseRegisterAtStart(instr->value());
1038  return new(zone()) LCmpMapAndBranch(value);
1039 }
1040 
1041 
1042 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1043  info()->MarkAsRequiresFrame();
1044  return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
1045 }
1046 
1047 
1048 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1049  info()->MarkAsRequiresFrame();
1050  return DefineAsRegister(new(zone()) LArgumentsElements);
1051 }
1052 
1053 
1054 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1055  LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
1056  LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
1057  LOperand* context = UseFixed(instr->context(), esi);
1058  LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
1059  return MarkAsCall(DefineFixed(result, eax), instr);
1060 }
1061 
1062 
1063 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1064  HInstanceOfKnownGlobal* instr) {
1065  LInstanceOfKnownGlobal* result =
1066  new(zone()) LInstanceOfKnownGlobal(
1067  UseFixed(instr->context(), esi),
1068  UseFixed(instr->left(), InstanceofStub::left()),
1069  FixedTemp(edi));
1070  return MarkAsCall(DefineFixed(result, eax), instr);
1071 }
1072 
1073 
1074 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1075  LOperand* receiver = UseRegister(instr->receiver());
1076  LOperand* function = UseRegister(instr->function());
1077  LOperand* temp = TempRegister();
1078  LWrapReceiver* result =
1079  new(zone()) LWrapReceiver(receiver, function, temp);
1080  return AssignEnvironment(DefineSameAsFirst(result));
1081 }
1082 
1083 
1084 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1085  LOperand* function = UseFixed(instr->function(), edi);
1086  LOperand* receiver = UseFixed(instr->receiver(), eax);
1087  LOperand* length = UseFixed(instr->length(), ebx);
1088  LOperand* elements = UseFixed(instr->elements(), ecx);
1089  LApplyArguments* result = new(zone()) LApplyArguments(function,
1090  receiver,
1091  length,
1092  elements);
1093  return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
1094 }
1095 
1096 
1097 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1098  LOperand* argument = UseAny(instr->argument());
1099  return new(zone()) LPushArgument(argument);
1100 }
1101 
1102 
1103 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1104  HStoreCodeEntry* store_code_entry) {
1105  LOperand* function = UseRegister(store_code_entry->function());
1106  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1107  return new(zone()) LStoreCodeEntry(function, code_object);
1108 }
1109 
1110 
1111 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1112  HInnerAllocatedObject* instr) {
1113  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1114  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1115  return DefineAsRegister(
1116  new(zone()) LInnerAllocatedObject(base_object, offset));
1117 }
1118 
1119 
1120 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1121  return instr->HasNoUses()
1122  ? NULL
1123  : DefineAsRegister(new(zone()) LThisFunction);
1124 }
1125 
1126 
1127 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1128  if (instr->HasNoUses()) return NULL;
1129 
1130  if (info()->IsStub()) {
1131  return DefineFixed(new(zone()) LContext, esi);
1132  }
1133 
1134  return DefineAsRegister(new(zone()) LContext);
1135 }
1136 
1137 
1138 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1139  LOperand* context = UseFixed(instr->context(), esi);
1140  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1141 }
1142 
1143 
1144 LInstruction* LChunkBuilder::DoCallJSFunction(
1145  HCallJSFunction* instr) {
1146  LOperand* function = UseFixed(instr->function(), edi);
1147 
1148  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1149 
1150  return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1151 }
1152 
1153 
1154 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1155  HCallWithDescriptor* instr) {
1156  const CallInterfaceDescriptor* descriptor = instr->descriptor();
1157 
1158  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1159  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1160  ops.Add(target, zone());
1161  for (int i = 1; i < instr->OperandCount(); i++) {
1162  LOperand* op = UseFixed(instr->OperandAt(i),
1163  descriptor->GetParameterRegister(i - 1));
1164  ops.Add(op, zone());
1165  }
1166 
1167  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1168  descriptor, ops, zone());
1169  return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1170 }
1171 
1172 
1173 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1174  LOperand* context = UseFixed(instr->context(), esi);
1175  LOperand* function = UseFixed(instr->function(), edi);
1176  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1177  return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1178 }
1179 
1180 
1181 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1182  switch (instr->op()) {
1183  case kMathFloor: return DoMathFloor(instr);
1184  case kMathRound: return DoMathRound(instr);
1185  case kMathAbs: return DoMathAbs(instr);
1186  case kMathLog: return DoMathLog(instr);
1187  case kMathExp: return DoMathExp(instr);
1188  case kMathSqrt: return DoMathSqrt(instr);
1189  case kMathPowHalf: return DoMathPowHalf(instr);
1190  case kMathClz32: return DoMathClz32(instr);
1191  default:
1192  UNREACHABLE();
1193  return NULL;
1194  }
1195 }
1196 
1197 
1198 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1199  LOperand* input = UseRegisterAtStart(instr->value());
1200  LMathFloor* result = new(zone()) LMathFloor(input);
1201  return AssignEnvironment(DefineAsRegister(result));
1202 }
1203 
1204 
1205 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1206  LOperand* input = UseRegister(instr->value());
1207  LOperand* temp = FixedTemp(xmm4);
1208  LMathRound* result = new(zone()) LMathRound(input, temp);
1209  return AssignEnvironment(DefineAsRegister(result));
1210 }
1211 
1212 
1213 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1214  LOperand* context = UseAny(instr->context()); // Deferred use.
1215  LOperand* input = UseRegisterAtStart(instr->value());
1216  LInstruction* result =
1217  DefineSameAsFirst(new(zone()) LMathAbs(context, input));
1218  Representation r = instr->value()->representation();
1219  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1220  if (!r.IsDouble()) result = AssignEnvironment(result);
1221  return result;
1222 }
1223 
1224 
1225 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1226  ASSERT(instr->representation().IsDouble());
1227  ASSERT(instr->value()->representation().IsDouble());
1228  LOperand* input = UseRegisterAtStart(instr->value());
1229  return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
1230 }
1231 
1232 
1233 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1234  LOperand* input = UseRegisterAtStart(instr->value());
1235  LMathClz32* result = new(zone()) LMathClz32(input);
1236  return DefineAsRegister(result);
1237 }
1238 
1239 
1240 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1241  ASSERT(instr->representation().IsDouble());
1242  ASSERT(instr->value()->representation().IsDouble());
1243  LOperand* value = UseTempRegister(instr->value());
1244  LOperand* temp1 = TempRegister();
1245  LOperand* temp2 = TempRegister();
1246  LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1247  return DefineAsRegister(result);
1248 }
1249 
1250 
1251 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1252  LOperand* input = UseRegisterAtStart(instr->value());
1253  LMathSqrt* result = new(zone()) LMathSqrt(input);
1254  return DefineSameAsFirst(result);
1255 }
1256 
1257 
1258 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1259  LOperand* input = UseRegisterAtStart(instr->value());
1260  LOperand* temp = TempRegister();
1261  LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
1262  return DefineSameAsFirst(result);
1263 }
1264 
1265 
1266 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1267  LOperand* context = UseFixed(instr->context(), esi);
1268  LOperand* constructor = UseFixed(instr->constructor(), edi);
1269  LCallNew* result = new(zone()) LCallNew(context, constructor);
1270  return MarkAsCall(DefineFixed(result, eax), instr);
1271 }
1272 
1273 
1274 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1275  LOperand* context = UseFixed(instr->context(), esi);
1276  LOperand* constructor = UseFixed(instr->constructor(), edi);
1277  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1278  return MarkAsCall(DefineFixed(result, eax), instr);
1279 }
1280 
1281 
1282 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1283  LOperand* context = UseFixed(instr->context(), esi);
1284  LOperand* function = UseFixed(instr->function(), edi);
1285  LCallFunction* call = new(zone()) LCallFunction(context, function);
1286  return MarkAsCall(DefineFixed(call, eax), instr);
1287 }
1288 
1289 
1290 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1291  LOperand* context = UseFixed(instr->context(), esi);
1292  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), eax), instr);
1293 }
1294 
1295 
1296 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1297  return DoShift(Token::ROR, instr);
1298 }
1299 
1300 
1301 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1302  return DoShift(Token::SHR, instr);
1303 }
1304 
1305 
1306 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1307  return DoShift(Token::SAR, instr);
1308 }
1309 
1310 
1311 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1312  return DoShift(Token::SHL, instr);
1313 }
1314 
1315 
1316 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1317  if (instr->representation().IsSmiOrInteger32()) {
1318  ASSERT(instr->left()->representation().Equals(instr->representation()));
1319  ASSERT(instr->right()->representation().Equals(instr->representation()));
1320  ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
1321 
1322  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1323  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1324  return DefineSameAsFirst(new(zone()) LBitI(left, right));
1325  } else {
1326  return DoArithmeticT(instr->op(), instr);
1327  }
1328 }
1329 
1330 
1331 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1332  ASSERT(instr->representation().IsSmiOrInteger32());
1333  ASSERT(instr->left()->representation().Equals(instr->representation()));
1334  ASSERT(instr->right()->representation().Equals(instr->representation()));
1335  LOperand* dividend = UseRegister(instr->left());
1336  int32_t divisor = instr->right()->GetInteger32Constant();
1337  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1338  dividend, divisor));
1339  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1340  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1341  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1342  divisor != 1 && divisor != -1)) {
1343  result = AssignEnvironment(result);
1344  }
1345  return result;
1346 }
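// [Illustrative note, not part of the original source] The three deopt conditions above, spelled
// out: a negative power-of-two divisor can produce -0 (e.g. 0 / -4); divisor == -1 can overflow
// (kMinInt / -1 does not fit in an int32); and for any divisor other than +/-1 an inexact
// division (e.g. 7 / 4) must deoptimize unless every use truncates the result to int32.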
1347 
1348 
1349 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1350  ASSERT(instr->representation().IsInteger32());
1351  ASSERT(instr->left()->representation().Equals(instr->representation()));
1352  ASSERT(instr->right()->representation().Equals(instr->representation()));
1353  LOperand* dividend = UseRegister(instr->left());
1354  int32_t divisor = instr->right()->GetInteger32Constant();
1355  LOperand* temp1 = FixedTemp(eax);
1356  LOperand* temp2 = FixedTemp(edx);
1357  LInstruction* result = DefineFixed(new(zone()) LDivByConstI(
1358  dividend, divisor, temp1, temp2), edx);
1359  if (divisor == 0 ||
1360  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1361  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1362  result = AssignEnvironment(result);
1363  }
1364  return result;
1365 }
1366 
1367 
1368 LInstruction* LChunkBuilder::DoDivI(HBinaryOperation* instr) {
1369  ASSERT(instr->representation().IsSmiOrInteger32());
1370  ASSERT(instr->left()->representation().Equals(instr->representation()));
1371  ASSERT(instr->right()->representation().Equals(instr->representation()));
1372  LOperand* dividend = UseFixed(instr->left(), eax);
1373  LOperand* divisor = UseRegister(instr->right());
1374  LOperand* temp = FixedTemp(edx);
1375  LInstruction* result = DefineFixed(new(zone()) LDivI(
1376  dividend, divisor, temp), eax);
1377  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1378  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1379  instr->CheckFlag(HValue::kCanOverflow) ||
1380  (!instr->IsMathFloorOfDiv() &&
1381  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
1382  result = AssignEnvironment(result);
1383  }
1384  return result;
1385 }
1386 
1387 
1388 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1389  if (instr->representation().IsSmiOrInteger32()) {
1390  if (instr->RightIsPowerOf2()) {
1391  return DoDivByPowerOf2I(instr);
1392  } else if (instr->right()->IsConstant()) {
1393  return DoDivByConstI(instr);
1394  } else {
1395  return DoDivI(instr);
1396  }
1397  } else if (instr->representation().IsDouble()) {
1398  return DoArithmeticD(Token::DIV, instr);
1399  } else {
1400  return DoArithmeticT(Token::DIV, instr);
1401  }
1402 }
1403 
1404 
1405 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1406  LOperand* dividend = UseRegisterAtStart(instr->left());
1407  int32_t divisor = instr->right()->GetInteger32Constant();
1408  LInstruction* result = DefineSameAsFirst(new(zone()) LFlooringDivByPowerOf2I(
1409  dividend, divisor));
1410  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1411  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1412  result = AssignEnvironment(result);
1413  }
1414  return result;
1415 }
1416 
1417 
1418 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1419  ASSERT(instr->representation().IsInteger32());
1420  ASSERT(instr->left()->representation().Equals(instr->representation()));
1421  ASSERT(instr->right()->representation().Equals(instr->representation()));
1422  LOperand* dividend = UseRegister(instr->left());
1423  int32_t divisor = instr->right()->GetInteger32Constant();
1424  LOperand* temp1 = FixedTemp(eax);
1425  LOperand* temp2 = FixedTemp(edx);
1426  LOperand* temp3 =
1427  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1428  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1429  NULL : TempRegister();
1430  LInstruction* result =
1431  DefineFixed(new(zone()) LFlooringDivByConstI(dividend,
1432  divisor,
1433  temp1,
1434  temp2,
1435  temp3),
1436  edx);
1437  if (divisor == 0 ||
1438  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1439  result = AssignEnvironment(result);
1440  }
1441  return result;
1442 }
1443 
1444 
1445 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1446  if (instr->RightIsPowerOf2()) {
1447  return DoFlooringDivByPowerOf2I(instr);
1448  } else if (instr->right()->IsConstant()) {
1449  return DoFlooringDivByConstI(instr);
1450  } else {
1451  return DoDivI(instr);
1452  }
1453 }
1454 
1455 
1456 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1457  ASSERT(instr->representation().IsSmiOrInteger32());
1458  ASSERT(instr->left()->representation().Equals(instr->representation()));
1459  ASSERT(instr->right()->representation().Equals(instr->representation()));
1460  LOperand* dividend = UseRegisterAtStart(instr->left());
1461  int32_t divisor = instr->right()->GetInteger32Constant();
1462  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1463  dividend, divisor));
1464  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1465  result = AssignEnvironment(result);
1466  }
1467  return result;
1468 }
1469 
1470 
1471 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1472  ASSERT(instr->representation().IsSmiOrInteger32());
1473  ASSERT(instr->left()->representation().Equals(instr->representation()));
1474  ASSERT(instr->right()->representation().Equals(instr->representation()));
1475  LOperand* dividend = UseRegister(instr->left());
1476  int32_t divisor = instr->right()->GetInteger32Constant();
1477  LOperand* temp1 = FixedTemp(eax);
1478  LOperand* temp2 = FixedTemp(edx);
1479  LInstruction* result = DefineFixed(new(zone()) LModByConstI(
1480  dividend, divisor, temp1, temp2), eax);
1481  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1482  result = AssignEnvironment(result);
1483  }
1484  return result;
1485 }
1486 
1487 
1488 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1489  ASSERT(instr->representation().IsSmiOrInteger32());
1490  ASSERT(instr->left()->representation().Equals(instr->representation()));
1491  ASSERT(instr->right()->representation().Equals(instr->representation()));
1492  LOperand* dividend = UseFixed(instr->left(), eax);
1493  LOperand* divisor = UseRegister(instr->right());
1494  LOperand* temp = FixedTemp(edx);
1495  LInstruction* result = DefineFixed(new(zone()) LModI(
1496  dividend, divisor, temp), edx);
1497  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1498  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1499  result = AssignEnvironment(result);
1500  }
1501  return result;
1502 }
1503 
1504 
1505 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1506  if (instr->representation().IsSmiOrInteger32()) {
1507  if (instr->RightIsPowerOf2()) {
1508  return DoModByPowerOf2I(instr);
1509  } else if (instr->right()->IsConstant()) {
1510  return DoModByConstI(instr);
1511  } else {
1512  return DoModI(instr);
1513  }
1514  } else if (instr->representation().IsDouble()) {
1515  return DoArithmeticD(Token::MOD, instr);
1516  } else {
1517  return DoArithmeticT(Token::MOD, instr);
1518  }
1519 }
1520 
1521 
1522 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1523  if (instr->representation().IsSmiOrInteger32()) {
1524  ASSERT(instr->left()->representation().Equals(instr->representation()));
1525  ASSERT(instr->right()->representation().Equals(instr->representation()));
1526  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1527  LOperand* right = UseOrConstant(instr->BetterRightOperand());
1528  LOperand* temp = NULL;
1529  if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1530  temp = TempRegister();
1531  }
1532  LMulI* mul = new(zone()) LMulI(left, right, temp);
1533  if (instr->CheckFlag(HValue::kCanOverflow) ||
1534  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1535  AssignEnvironment(mul);
1536  }
1537  return DefineSameAsFirst(mul);
1538  } else if (instr->representation().IsDouble()) {
1539  return DoArithmeticD(Token::MUL, instr);
1540  } else {
1541  return DoArithmeticT(Token::MUL, instr);
1542  }
1543 }
1544 
1545 
1546 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1547  if (instr->representation().IsSmiOrInteger32()) {
1548  ASSERT(instr->left()->representation().Equals(instr->representation()));
1549  ASSERT(instr->right()->representation().Equals(instr->representation()));
1550  LOperand* left = UseRegisterAtStart(instr->left());
1551  LOperand* right = UseOrConstantAtStart(instr->right());
1552  LSubI* sub = new(zone()) LSubI(left, right);
1553  LInstruction* result = DefineSameAsFirst(sub);
1554  if (instr->CheckFlag(HValue::kCanOverflow)) {
1555  result = AssignEnvironment(result);
1556  }
1557  return result;
1558  } else if (instr->representation().IsDouble()) {
1559  return DoArithmeticD(Token::SUB, instr);
1560  } else {
1561  return DoArithmeticT(Token::SUB, instr);
1562  }
1563 }
1564 
1565 
1566 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1567  if (instr->representation().IsSmiOrInteger32()) {
1568  ASSERT(instr->left()->representation().Equals(instr->representation()));
1569  ASSERT(instr->right()->representation().Equals(instr->representation()));
1570  // Check to see if it would be advantageous to use an lea instruction rather
1571  // than an add. This is the case when no overflow check is needed and there
1572  // are multiple uses of the add's inputs, so using a 3-register add will
1573  // preserve all input values for later uses.
1574  bool use_lea = LAddI::UseLea(instr);
1575  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1576  HValue* right_candidate = instr->BetterRightOperand();
1577  LOperand* right = use_lea
1578  ? UseRegisterOrConstantAtStart(right_candidate)
1579  : UseOrConstantAtStart(right_candidate);
1580  LAddI* add = new(zone()) LAddI(left, right);
1581  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1582  LInstruction* result = use_lea
1583  ? DefineAsRegister(add)
1584  : DefineSameAsFirst(add);
1585  if (can_overflow) {
1586  result = AssignEnvironment(result);
1587  }
1588  return result;
1589  } else if (instr->representation().IsDouble()) {
1590  return DoArithmeticD(Token::ADD, instr);
1591  } else if (instr->representation().IsExternal()) {
1592  ASSERT(instr->left()->representation().IsExternal());
1593  ASSERT(instr->right()->representation().IsInteger32());
1594  ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
1595  bool use_lea = LAddI::UseLea(instr);
1596  LOperand* left = UseRegisterAtStart(instr->left());
1597  HValue* right_candidate = instr->right();
1598  LOperand* right = use_lea
1599  ? UseRegisterOrConstantAtStart(right_candidate)
1600  : UseOrConstantAtStart(right_candidate);
1601  LAddI* add = new(zone()) LAddI(left, right);
1602  LInstruction* result = use_lea
1603  ? DefineAsRegister(add)
1604  : DefineSameAsFirst(add);
1605  return result;
1606  } else {
1607  return DoArithmeticT(Token::ADD, instr);
1608  }
1609 }
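// [Illustrative note, not part of the original source] The lea/add choice above in ia32 terms:
//
//   add eax, ebx          ; two-operand form, overwrites the left input (DefineSameAsFirst)
//   lea edx, [eax + ebx]  ; three-operand form, both inputs survive (DefineAsRegister)
//
// lea also leaves the flags untouched, which is why it is only chosen when no overflow check is
// needed.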
1610 
1611 
1612 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1613  LOperand* left = NULL;
1614  LOperand* right = NULL;
1615  if (instr->representation().IsSmiOrInteger32()) {
1616  ASSERT(instr->left()->representation().Equals(instr->representation()));
1617  ASSERT(instr->right()->representation().Equals(instr->representation()));
1618  left = UseRegisterAtStart(instr->BetterLeftOperand());
1619  right = UseOrConstantAtStart(instr->BetterRightOperand());
1620  } else {
1621  ASSERT(instr->representation().IsDouble());
1622  ASSERT(instr->left()->representation().IsDouble());
1623  ASSERT(instr->right()->representation().IsDouble());
1624  left = UseRegisterAtStart(instr->left());
1625  right = UseRegisterAtStart(instr->right());
1626  }
1627  LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1628  return DefineSameAsFirst(minmax);
1629 }
1630 
1631 
1632 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1633  ASSERT(instr->representation().IsDouble());
1634  // We call a C function for double power. It can't trigger a GC.
1635  // We need to use a fixed result register for the call.
1636  Representation exponent_type = instr->right()->representation();
1637  ASSERT(instr->left()->representation().IsDouble());
1638  LOperand* left = UseFixedDouble(instr->left(), xmm2);
1639  LOperand* right = exponent_type.IsDouble() ?
1640  UseFixedDouble(instr->right(), xmm1) :
1641  UseFixed(instr->right(), eax);
1642  LPower* result = new(zone()) LPower(left, right);
1643  return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1644  CAN_DEOPTIMIZE_EAGERLY);
1645 }
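// The power operation is implemented by calling out to C++ code, so operands
// and result must sit in the fixed registers that call expects: the base in
// xmm2, the exponent in xmm1 (or eax when it is tagged or an integer), and
// the result in xmm3. MarkAsCall records that the call clobbers the
// allocatable registers, and CAN_DEOPTIMIZE_EAGERLY ensures an environment is
// attached so the instruction can deoptimize before performing the call.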
1646 
1647 
1648 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1649  ASSERT(instr->left()->representation().IsSmiOrTagged());
1650  ASSERT(instr->right()->representation().IsSmiOrTagged());
1651  LOperand* context = UseFixed(instr->context(), esi);
1652  LOperand* left = UseFixed(instr->left(), edx);
1653  LOperand* right = UseFixed(instr->right(), eax);
1654  LCmpT* result = new(zone()) LCmpT(context, left, right);
1655  return MarkAsCall(DefineFixed(result, eax), instr);
1656 }
1657 
1658 
1659 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1660  HCompareNumericAndBranch* instr) {
1661  Representation r = instr->representation();
1662  if (r.IsSmiOrInteger32()) {
1663  ASSERT(instr->left()->representation().Equals(r));
1664  ASSERT(instr->right()->representation().Equals(r));
1665  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1666  LOperand* right = UseOrConstantAtStart(instr->right());
1667  return new(zone()) LCompareNumericAndBranch(left, right);
1668  } else {
1669  ASSERT(r.IsDouble());
1670  ASSERT(instr->left()->representation().IsDouble());
1671  ASSERT(instr->right()->representation().IsDouble());
1672  LOperand* left;
1673  LOperand* right;
1674  if (CanBeImmediateConstant(instr->left()) &&
1675  CanBeImmediateConstant(instr->right())) {
1676  // The code generator requires either both inputs to be constant
1677  // operands, or neither.
1678  left = UseConstant(instr->left());
1679  right = UseConstant(instr->right());
1680  } else {
1681  left = UseRegisterAtStart(instr->left());
1682  right = UseRegisterAtStart(instr->right());
1683  }
1684  return new(zone()) LCompareNumericAndBranch(left, right);
1685  }
1686 }
1687 
1688 
1689 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1690  HCompareObjectEqAndBranch* instr) {
1691  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1692  if (goto_instr != NULL) return goto_instr;
1693  LOperand* left = UseRegisterAtStart(instr->left());
1694  LOperand* right = UseOrConstantAtStart(instr->right());
1695  return new(zone()) LCmpObjectEqAndBranch(left, right);
1696 }
1697 
1698 
1699 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1700  HCompareHoleAndBranch* instr) {
1701  LOperand* value = UseRegisterAtStart(instr->value());
1702  return new(zone()) LCmpHoleAndBranch(value);
1703 }
1704 
1705 
1706 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1707  HCompareMinusZeroAndBranch* instr) {
1708  LInstruction* goto_instr = CheckElideControlInstruction(instr);
1709  if (goto_instr != NULL) return goto_instr;
1710  LOperand* value = UseRegister(instr->value());
1711  LOperand* scratch = TempRegister();
1712  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1713 }
1714 
1715 
1716 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1717  ASSERT(instr->value()->representation().IsSmiOrTagged());
1718  LOperand* temp = TempRegister();
1719  return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
1720 }
1721 
1722 
1723 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1724  ASSERT(instr->value()->representation().IsTagged());
1725  LOperand* temp = TempRegister();
1726  return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
1727 }
1728 
1729 
1730 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1731  ASSERT(instr->value()->representation().IsTagged());
1732  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1733 }
1734 
1735 
1736 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1737  HIsUndetectableAndBranch* instr) {
1738  ASSERT(instr->value()->representation().IsTagged());
1739  return new(zone()) LIsUndetectableAndBranch(
1740  UseRegisterAtStart(instr->value()), TempRegister());
1741 }
1742 
1743 
1744 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1745  HStringCompareAndBranch* instr) {
1746  ASSERT(instr->left()->representation().IsTagged());
1747  ASSERT(instr->right()->representation().IsTagged());
1748  LOperand* context = UseFixed(instr->context(), esi);
1749  LOperand* left = UseFixed(instr->left(), edx);
1750  LOperand* right = UseFixed(instr->right(), eax);
1751 
1752  LStringCompareAndBranch* result = new(zone())
1753  LStringCompareAndBranch(context, left, right);
1754 
1755  return MarkAsCall(result, instr);
1756 }
1757 
1758 
1759 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1760  HHasInstanceTypeAndBranch* instr) {
1761  ASSERT(instr->value()->representation().IsTagged());
1762  return new(zone()) LHasInstanceTypeAndBranch(
1763  UseRegisterAtStart(instr->value()),
1764  TempRegister());
1765 }
1766 
1767 
1768 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1769  HGetCachedArrayIndex* instr) {
1770  ASSERT(instr->value()->representation().IsTagged());
1771  LOperand* value = UseRegisterAtStart(instr->value());
1772 
1773  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1774 }
1775 
1776 
1777 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1778  HHasCachedArrayIndexAndBranch* instr) {
1779  ASSERT(instr->value()->representation().IsTagged());
1780  return new(zone()) LHasCachedArrayIndexAndBranch(
1781  UseRegisterAtStart(instr->value()));
1782 }
1783 
1784 
1785 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1786  HClassOfTestAndBranch* instr) {
1787  ASSERT(instr->value()->representation().IsTagged());
1788  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
1789  TempRegister(),
1790  TempRegister());
1791 }
1792 
1793 
1794 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1795  LOperand* map = UseRegisterAtStart(instr->value());
1796  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1797 }
1798 
1799 
1800 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1801  LOperand* date = UseFixed(instr->value(), eax);
1802  LDateField* result =
1803  new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
1804  return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
1805 }
1806 
1807 
1808 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1809  LOperand* string = UseRegisterAtStart(instr->string());
1810  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1811  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1812 }
1813 
1814 
1815 LOperand* LChunkBuilder::GetSeqStringSetCharOperand(HSeqStringSetChar* instr) {
1816  if (instr->encoding() == String::ONE_BYTE_ENCODING) {
1817  if (FLAG_debug_code) {
1818  return UseFixed(instr->value(), eax);
1819  } else {
1820  return UseFixedOrConstant(instr->value(), eax);
1821  }
1822  } else {
1823  if (FLAG_debug_code) {
1824  return UseRegisterAtStart(instr->value());
1825  } else {
1826  return UseRegisterOrConstantAtStart(instr->value());
1827  }
1828  }
1829 }
1830 
1831 
1832 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1833  LOperand* string = UseRegisterAtStart(instr->string());
1834  LOperand* index = FLAG_debug_code
1835  ? UseRegisterAtStart(instr->index())
1836  : UseRegisterOrConstantAtStart(instr->index());
1837  LOperand* value = GetSeqStringSetCharOperand(instr);
1838  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), esi) : NULL;
1839  LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
1840  index, value);
1841  if (FLAG_debug_code) {
1842  result = MarkAsCall(result, instr);
1843  }
1844  return result;
1845 }
1846 
1847 
1848 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1849  return AssignEnvironment(new(zone()) LBoundsCheck(
1850  UseRegisterOrConstantAtStart(instr->index()),
1851  UseAtStart(instr->length())));
1852 }
1853 
1854 
1855 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1856  HBoundsCheckBaseIndexInformation* instr) {
1857  UNREACHABLE();
1858  return NULL;
1859 }
1860 
1861 
1862 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1863  // The control instruction marking the end of a block that completed
1864  // abruptly (e.g., threw an exception). There is nothing specific to do.
1865  return NULL;
1866 }
1867 
1868 
1869 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1870  return NULL;
1871 }
1872 
1873 
1874 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1875  // All HForceRepresentation instructions should be eliminated in the
1876  // representation change phase of Hydrogen.
1877  UNREACHABLE();
1878  return NULL;
1879 }
1880 
1881 
1882 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1883  Representation from = instr->from();
1884  Representation to = instr->to();
1885  if (from.IsSmi()) {
1886  if (to.IsTagged()) {
1887  LOperand* value = UseRegister(instr->value());
1888  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1889  }
1890  from = Representation::Tagged();
1891  }
1892  // Only mark conversions that might need to allocate as calling rather than
1893  // all changes. This way simple, non-allocating conversions do not force
1894  // building a stack frame.
1895  if (from.IsTagged()) {
1896  if (to.IsDouble()) {
1897  LOperand* value = UseRegister(instr->value());
1898  // Temp register only necessary for minus zero check.
1899  LOperand* temp = TempRegister();
1900  LInstruction* result = DefineAsRegister(
1901  new(zone()) LNumberUntagD(value, temp));
1902  if (!instr->value()->representation().IsSmi()) {
1903  result = AssignEnvironment(result);
1904  }
1905  return result;
1906  } else if (to.IsSmi()) {
1907  HValue* val = instr->value();
1908  LOperand* value = UseRegister(val);
1909  if (val->type().IsSmi()) {
1910  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1911  }
1912  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1913  } else {
1914  ASSERT(to.IsInteger32());
1915  HValue* val = instr->value();
1916  if (val->type().IsSmi() || val->representation().IsSmi()) {
1917  LOperand* value = UseRegister(val);
1918  return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1919  } else {
1920  bool truncating = instr->CanTruncateToInt32();
1921  LOperand* xmm_temp =
1922  (CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating)
1923  ? FixedTemp(xmm1) : NULL;
1924  LInstruction* result = DefineSameAsFirst(
1925  new(zone()) LTaggedToI(UseRegister(val), xmm_temp));
1926  if (!instr->value()->representation().IsSmi()) {
1927  // Note: Only deopts in deferred code.
1928  result = AssignEnvironment(result);
1929  }
1930  return result;
1931  }
1932  }
1933  } else if (from.IsDouble()) {
1934  if (to.IsTagged()) {
1935  info()->MarkAsDeferredCalling();
1936  LOperand* value = UseRegisterAtStart(instr->value());
1937  LOperand* temp = FLAG_inline_new ? TempRegister() : NULL;
1938 
1939  // Make sure that temp and result_temp are different registers.
1940  LUnallocated* result_temp = TempRegister();
1941  LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1942  return AssignPointerMap(Define(result, result_temp));
1943  } else if (to.IsSmi()) {
1944  LOperand* value = UseRegister(instr->value());
1945  return AssignEnvironment(
1946  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1947  } else {
1948  ASSERT(to.IsInteger32());
1949  bool truncating = instr->CanTruncateToInt32();
1950  bool needs_temp = CpuFeatures::IsSafeForSnapshot(SSE2) && !truncating;
1951  LOperand* value = needs_temp ?
1952  UseTempRegister(instr->value()) : UseRegister(instr->value());
1953  LOperand* temp = needs_temp ? TempRegister() : NULL;
1954  LInstruction* result =
1955  DefineAsRegister(new(zone()) LDoubleToI(value, temp));
1956  if (!truncating) result = AssignEnvironment(result);
1957  return result;
1958  }
1959  } else if (from.IsInteger32()) {
1960  info()->MarkAsDeferredCalling();
1961  if (to.IsTagged()) {
1962  HValue* val = instr->value();
1963  LOperand* value = UseRegister(val);
1964  if (!instr->CheckFlag(HValue::kCanOverflow)) {
1965  return DefineSameAsFirst(new(zone()) LSmiTag(value));
1966  } else if (val->CheckFlag(HInstruction::kUint32)) {
1967  LOperand* temp1 = TempRegister();
1968  LOperand* temp2 = CpuFeatures::IsSupported(SSE2) ? FixedTemp(xmm1)
1969  : NULL;
1970  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1971  return AssignPointerMap(DefineSameAsFirst(result));
1972  } else {
1973  LOperand* temp = TempRegister();
1974  LNumberTagI* result = new(zone()) LNumberTagI(value, temp);
1975  return AssignPointerMap(DefineSameAsFirst(result));
1976  }
1977  } else if (to.IsSmi()) {
1978  HValue* val = instr->value();
1979  LOperand* value = UseRegister(val);
1980  LInstruction* result = DefineSameAsFirst(new(zone()) LSmiTag(value));
1981  if (instr->CheckFlag(HValue::kCanOverflow)) {
1982  result = AssignEnvironment(result);
1983  }
1984  return result;
1985  } else {
1986  ASSERT(to.IsDouble());
1987  if (instr->value()->CheckFlag(HInstruction::kUint32)) {
1988  LOperand* temp = FixedTemp(xmm1);
1989  return DefineAsRegister(
1990  new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
1991  } else {
1992  return DefineAsRegister(
1993  new(zone()) LInteger32ToDouble(Use(instr->value())));
1994  }
1995  }
1996  }
1997  UNREACHABLE();
1998  return NULL;
1999 }
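// Taken together, DoChange covers the representation-change matrix on ia32:
// smi <-> tagged is a dummy use or a smi check, tagged -> double / int32 may
// deoptimize (LNumberUntagD and LTaggedToI, the latter reserving xmm1 only
// when SSE2 is usable and the value is not simply truncated), double ->
// tagged has to allocate a heap number (hence the deferred-calling mark and
// the pointer map), and integer -> tagged allocates only when the value can
// overflow the smi range (LNumberTagI / LNumberTagU).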
2000 
2001 
2002 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
2003  LOperand* value = UseAtStart(instr->value());
2004  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
2005 }
2006 
2007 
2008 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
2009  LOperand* value = UseRegisterAtStart(instr->value());
2010  return AssignEnvironment(new(zone()) LCheckSmi(value));
2011 }
2012 
2013 
2014 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
2015  LOperand* value = UseRegisterAtStart(instr->value());
2016  LOperand* temp = TempRegister();
2017  LCheckInstanceType* result = new(zone()) LCheckInstanceType(value, temp);
2018  return AssignEnvironment(result);
2019 }
2020 
2021 
2022 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
2023  // If the object is in new space, we'll emit a global cell compare and so
2024  // want the value in a register. If the object gets promoted before we
2025  // emit code, we will still get the register but will do an immediate
2026  // compare instead of the cell compare. This is safe.
2027  LOperand* value = instr->object_in_new_space()
2028  ? UseRegisterAtStart(instr->value()) : UseAtStart(instr->value());
2029  return AssignEnvironment(new(zone()) LCheckValue(value));
2030 }
2031 
2032 
2033 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
2034  LOperand* value = NULL;
2035  if (!instr->CanOmitMapChecks()) {
2036  value = UseRegisterAtStart(instr->value());
2037  if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
2038  }
2039  LCheckMaps* result = new(zone()) LCheckMaps(value);
2040  if (!instr->CanOmitMapChecks()) {
2041  // Note: Only deopts in deferred code.
2042  AssignEnvironment(result);
2043  if (instr->has_migration_target()) return AssignPointerMap(result);
2044  }
2045  return result;
2046 }
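// When the map check cannot be omitted it carries an environment and, per the
// note above, only deoptimizes from deferred code. If one of the maps is a
// migration target the check may instead call the runtime to migrate the
// object to its up-to-date map, which is why the instruction is also given a
// pointer map and the function is marked as making deferred calls.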
2047 
2048 
2049 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
2050  HValue* value = instr->value();
2051  Representation input_rep = value->representation();
2052  if (input_rep.IsDouble()) {
2053  LOperand* reg = UseRegister(value);
2054  return DefineFixed(new(zone()) LClampDToUint8(reg), eax);
2055  } else if (input_rep.IsInteger32()) {
2056  LOperand* reg = UseFixed(value, eax);
2057  return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
2058  } else {
2059  ASSERT(input_rep.IsSmiOrTagged());
2060  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
2061  LOperand* reg = UseFixed(value, eax);
2062  // Register allocator doesn't (yet) support allocation of double
2063  // temps. Reserve xmm1 explicitly.
2064  LOperand* temp = FixedTemp(xmm1);
2065  LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp);
2066  return AssignEnvironment(DefineFixed(result, eax));
2067  } else {
2068  LOperand* value = UseRegister(instr->value());
2069  LClampTToUint8NoSSE2* res =
2070  new(zone()) LClampTToUint8NoSSE2(value, TempRegister(),
2071  TempRegister(), TempRegister());
2072  return AssignEnvironment(DefineFixed(res, ecx));
2073  }
2074  }
2075 }
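// Only the tagged input case is unresolved at compile time: the value may
// still be a smi or a heap number, so both tagged variants above get an
// environment to deoptimize with. With SSE2 available the clamp reserves xmm1
// as an explicit double temp; the NoSSE2 fallback works without XMM registers
// (on the x87 stack), needs three general-purpose temps, and produces its
// result in ecx.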
2076 
2077 
2078 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2079  HValue* value = instr->value();
2080  ASSERT(value->representation().IsDouble());
2081  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2082 }
2083 
2084 
2085 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2086  LOperand* lo = UseRegister(instr->lo());
2087  LOperand* hi = UseRegister(instr->hi());
2088  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2089 }
2090 
2091 
2092 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2093  LOperand* context = info()->IsStub() ? UseFixed(instr->context(), esi) : NULL;
2094  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2095  return new(zone()) LReturn(
2096  UseFixed(instr->value(), eax), context, parameter_count);
2097 }
2098 
2099 
2100 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2101  Representation r = instr->representation();
2102  if (r.IsSmi()) {
2103  return DefineAsRegister(new(zone()) LConstantS);
2104  } else if (r.IsInteger32()) {
2105  return DefineAsRegister(new(zone()) LConstantI);
2106  } else if (r.IsDouble()) {
2107  double value = instr->DoubleValue();
2108  bool value_is_zero = BitCast<uint64_t, double>(value) == 0;
2109  LOperand* temp = value_is_zero ? NULL : TempRegister();
2110  return DefineAsRegister(new(zone()) LConstantD(temp));
2111  } else if (r.IsExternal()) {
2112  return DefineAsRegister(new(zone()) LConstantE);
2113  } else if (r.IsTagged()) {
2114  return DefineAsRegister(new(zone()) LConstantT);
2115  } else {
2116  UNREACHABLE();
2117  return NULL;
2118  }
2119 }
2120 
2121 
2122 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2123  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2124  return instr->RequiresHoleCheck()
2125  ? AssignEnvironment(DefineAsRegister(result))
2126  : DefineAsRegister(result);
2127 }
2128 
2129 
2130 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2131  LOperand* context = UseFixed(instr->context(), esi);
2132  LOperand* global_object = UseFixed(instr->global_object(), edx);
2133  LLoadGlobalGeneric* result =
2134  new(zone()) LLoadGlobalGeneric(context, global_object);
2135  return MarkAsCall(DefineFixed(result, eax), instr);
2136 }
2137 
2138 
2139 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2140  LStoreGlobalCell* result =
2141  new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
2142  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
2143 }
2144 
2145 
2146 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2147  LOperand* context = UseRegisterAtStart(instr->value());
2148  LInstruction* result =
2149  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2150  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2151  result = AssignEnvironment(result);
2152  }
2153  return result;
2154 }
2155 
2156 
2157 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2158  LOperand* value;
2159  LOperand* temp;
2160  LOperand* context = UseRegister(instr->context());
2161  if (instr->NeedsWriteBarrier()) {
2162  value = UseTempRegister(instr->value());
2163  temp = TempRegister();
2164  } else {
2165  value = UseRegister(instr->value());
2166  temp = NULL;
2167  }
2168  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2169  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2170  result = AssignEnvironment(result);
2171  }
2172  return result;
2173 }
2174 
2175 
2176 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2177  LOperand* obj = (instr->access().IsExternalMemory() &&
2178  instr->access().offset() == 0)
2179  ? UseRegisterOrConstantAtStart(instr->object())
2180  : UseRegisterAtStart(instr->object());
2181  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2182 }
2183 
2184 
2185 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2186  LOperand* context = UseFixed(instr->context(), esi);
2187  LOperand* object = UseFixed(instr->object(), edx);
2188  LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
2189  return MarkAsCall(DefineFixed(result, eax), instr);
2190 }
2191 
2192 
2193 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2194  HLoadFunctionPrototype* instr) {
2195  return AssignEnvironment(DefineAsRegister(
2196  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()),
2197  TempRegister())));
2198 }
2199 
2200 
2201 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2202  return DefineAsRegister(new(zone()) LLoadRoot);
2203 }
2204 
2205 
2206 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2207  ASSERT(instr->key()->representation().IsSmiOrInteger32());
2208  ElementsKind elements_kind = instr->elements_kind();
2209  bool clobbers_key = ExternalArrayOpRequiresTemp(
2210  instr->key()->representation(), elements_kind);
2211  LOperand* key = clobbers_key
2212  ? UseTempRegister(instr->key())
2213  : UseRegisterOrConstantAtStart(instr->key());
2214  LInstruction* result = NULL;
2215 
2216  if (!instr->is_typed_elements()) {
2217  LOperand* obj = UseRegisterAtStart(instr->elements());
2218  result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key));
2219  } else {
2220  ASSERT(
2221  (instr->representation().IsInteger32() &&
2222  !(IsDoubleOrFloatElementsKind(instr->elements_kind()))) ||
2223  (instr->representation().IsDouble() &&
2224  (IsDoubleOrFloatElementsKind(instr->elements_kind()))));
2225  LOperand* backing_store = UseRegister(instr->elements());
2226  result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key));
2227  }
2228 
2229  if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2230  // see LCodeGen::DoLoadKeyedExternalArray
2231  ((instr->elements_kind() == EXTERNAL_UINT32_ELEMENTS ||
2232  instr->elements_kind() == UINT32_ELEMENTS) &&
2233  !instr->CheckFlag(HInstruction::kUint32)) :
2234  // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2235  // LCodeGen::DoLoadKeyedFixedArray
2236  instr->RequiresHoleCheck()) {
2237  result = AssignEnvironment(result);
2238  }
2239  return result;
2240 }
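// As the two comments above spell out, a keyed load only needs an environment
// in two cases: an external/typed UINT32 load whose result is not known to be
// used as a uint32 (it may not fit in a signed 32-bit value and must be able
// to deoptimize), and a load from an ordinary fixed or fixed-double array
// that has to check for the hole.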
2241 
2242 
2243 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2244  LOperand* context = UseFixed(instr->context(), esi);
2245  LOperand* object = UseFixed(instr->object(), edx);
2246  LOperand* key = UseFixed(instr->key(), ecx);
2247 
2248  LLoadKeyedGeneric* result =
2249  new(zone()) LLoadKeyedGeneric(context, object, key);
2250  return MarkAsCall(DefineFixed(result, eax), instr);
2251 }
2252 
2253 
2254 LOperand* LChunkBuilder::GetStoreKeyedValueOperand(HStoreKeyed* instr) {
2255  ElementsKind elements_kind = instr->elements_kind();
2256 
2257  // Determine if we need a byte register in this case for the value.
2258  bool val_is_fixed_register =
2259  elements_kind == EXTERNAL_INT8_ELEMENTS ||
2260  elements_kind == EXTERNAL_UINT8_ELEMENTS ||
2261  elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2262  elements_kind == UINT8_ELEMENTS ||
2263  elements_kind == INT8_ELEMENTS ||
2264  elements_kind == UINT8_CLAMPED_ELEMENTS;
2265  if (val_is_fixed_register) {
2266  return UseFixed(instr->value(), eax);
2267  }
2268 
2269  if (!CpuFeatures::IsSafeForSnapshot(SSE2) &&
2270  IsDoubleOrFloatElementsKind(elements_kind)) {
2271  return UseRegisterAtStart(instr->value());
2272  }
2273 
2274  return UseRegister(instr->value());
2275 }
2276 
2277 
2278 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2279  if (!instr->is_typed_elements()) {
2280  ASSERT(instr->elements()->representation().IsTagged());
2281  ASSERT(instr->key()->representation().IsInteger32() ||
2282  instr->key()->representation().IsSmi());
2283 
2284  if (instr->value()->representation().IsDouble()) {
2285  LOperand* object = UseRegisterAtStart(instr->elements());
2286  LOperand* val = NULL;
2287  val = UseRegisterAtStart(instr->value());
2288  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2289  return new(zone()) LStoreKeyed(object, key, val);
2290  } else {
2291  ASSERT(instr->value()->representation().IsSmiOrTagged());
2292  bool needs_write_barrier = instr->NeedsWriteBarrier();
2293 
2294  LOperand* obj = UseRegister(instr->elements());
2295  LOperand* val;
2296  LOperand* key;
2297  if (needs_write_barrier) {
2298  val = UseTempRegister(instr->value());
2299  key = UseTempRegister(instr->key());
2300  } else {
2301  val = UseRegisterOrConstantAtStart(instr->value());
2302  key = UseRegisterOrConstantAtStart(instr->key());
2303  }
2304  return new(zone()) LStoreKeyed(obj, key, val);
2305  }
2306  }
2307 
2308  ElementsKind elements_kind = instr->elements_kind();
2309  ASSERT(
2310  (instr->value()->representation().IsInteger32() &&
2311  !IsDoubleOrFloatElementsKind(elements_kind)) ||
2312  (instr->value()->representation().IsDouble() &&
2313  IsDoubleOrFloatElementsKind(elements_kind)));
2314  ASSERT((instr->is_fixed_typed_array() &&
2315  instr->elements()->representation().IsTagged()) ||
2316  (instr->is_external() &&
2317  instr->elements()->representation().IsExternal()));
2318 
2319  LOperand* backing_store = UseRegister(instr->elements());
2320  LOperand* val = GetStoreKeyedValueOperand(instr);
2321  bool clobbers_key = ExternalArrayOpRequiresTemp(
2322  instr->key()->representation(), elements_kind);
2323  LOperand* key = clobbers_key
2324  ? UseTempRegister(instr->key())
2325  : UseRegisterOrConstantAtStart(instr->key());
2326  return new(zone()) LStoreKeyed(backing_store, key, val);
2327 }
2328 
2329 
2330 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2331  LOperand* context = UseFixed(instr->context(), esi);
2332  LOperand* object = UseFixed(instr->object(), edx);
2333  LOperand* key = UseFixed(instr->key(), ecx);
2334  LOperand* value = UseFixed(instr->value(), eax);
2335 
2336  ASSERT(instr->object()->representation().IsTagged());
2337  ASSERT(instr->key()->representation().IsTagged());
2338  ASSERT(instr->value()->representation().IsTagged());
2339 
2340  LStoreKeyedGeneric* result =
2341  new(zone()) LStoreKeyedGeneric(context, object, key, value);
2342  return MarkAsCall(result, instr);
2343 }
2344 
2345 
2346 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2347  HTransitionElementsKind* instr) {
2348  LOperand* object = UseRegister(instr->object());
2349  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2350  LOperand* object = UseRegister(instr->object());
2351  LOperand* new_map_reg = TempRegister();
2352  LOperand* temp_reg = TempRegister();
2353  LTransitionElementsKind* result =
2354  new(zone()) LTransitionElementsKind(object, NULL,
2355  new_map_reg, temp_reg);
2356  return result;
2357  } else {
2358  LOperand* context = UseFixed(instr->context(), esi);
2359  LTransitionElementsKind* result =
2360  new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2361  return AssignPointerMap(result);
2362  }
2363 }
2364 
2365 
2366 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2367  HTrapAllocationMemento* instr) {
2368  LOperand* object = UseRegister(instr->object());
2369  LOperand* temp = TempRegister();
2370  LTrapAllocationMemento* result =
2371  new(zone()) LTrapAllocationMemento(object, temp);
2372  return AssignEnvironment(result);
2373 }
2374 
2375 
2376 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2377  bool is_in_object = instr->access().IsInobject();
2378  bool is_external_location = instr->access().IsExternalMemory() &&
2379  instr->access().offset() == 0;
2380  bool needs_write_barrier = instr->NeedsWriteBarrier();
2381  bool needs_write_barrier_for_map = instr->has_transition() &&
2382  instr->NeedsWriteBarrierForMap();
2383 
2384  LOperand* obj;
2385  if (needs_write_barrier) {
2386  obj = is_in_object
2387  ? UseRegister(instr->object())
2388  : UseTempRegister(instr->object());
2389  } else if (is_external_location) {
2390  ASSERT(!is_in_object);
2391  ASSERT(!needs_write_barrier);
2392  ASSERT(!needs_write_barrier_for_map);
2393  obj = UseRegisterOrConstant(instr->object());
2394  } else {
2395  obj = needs_write_barrier_for_map
2396  ? UseRegister(instr->object())
2397  : UseRegisterAtStart(instr->object());
2398  }
2399 
2400  bool can_be_constant = instr->value()->IsConstant() &&
2401  HConstant::cast(instr->value())->NotInNewSpace() &&
2402  !instr->field_representation().IsDouble();
2403 
2404  LOperand* val;
2405  if (instr->field_representation().IsInteger8() ||
2406  instr->field_representation().IsUInteger8()) {
2407  // mov_b requires a byte register (i.e. any of eax, ebx, ecx, edx).
2408  // Just force the value to be in eax and we're safe here.
2409  val = UseFixed(instr->value(), eax);
2410  } else if (needs_write_barrier) {
2411  val = UseTempRegister(instr->value());
2412  } else if (can_be_constant) {
2413  val = UseRegisterOrConstant(instr->value());
2414  } else if (instr->field_representation().IsSmi()) {
2415  val = UseTempRegister(instr->value());
2416  } else if (instr->field_representation().IsDouble()) {
2417  val = UseRegisterAtStart(instr->value());
2418  } else {
2419  val = UseRegister(instr->value());
2420  }
2421 
2422  // We only need a scratch register if we have a write barrier or we
2423  // have a store into the properties array (not in-object-property).
2424  LOperand* temp = (!is_in_object || needs_write_barrier ||
2425  needs_write_barrier_for_map) ? TempRegister() : NULL;
2426 
2427  // We need a temporary register for write barrier of the map field.
2428  LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
2429 
2430  LInstruction* result =
2431  new(zone()) LStoreNamedField(obj, val, temp, temp_map);
2432  if (!instr->access().IsExternalMemory() &&
2433  instr->field_representation().IsHeapObject() &&
2434  (val->IsConstantOperand()
2435  ? HConstant::cast(instr->value())->HasSmiValue()
2436  : !instr->value()->type().IsHeapObject())) {
2437  result = AssignEnvironment(result);
2438  }
2439  return result;
2440 }
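// The final AssignEnvironment handles fields tracked as always containing
// heap objects: if the stored value might still be a smi (or is a known smi
// constant), the store must be able to deoptimize instead of violating the
// recorded field representation.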
2441 
2442 
2443 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2444  LOperand* context = UseFixed(instr->context(), esi);
2445  LOperand* object = UseFixed(instr->object(), edx);
2446  LOperand* value = UseFixed(instr->value(), eax);
2447 
2448  LStoreNamedGeneric* result =
2449  new(zone()) LStoreNamedGeneric(context, object, value);
2450  return MarkAsCall(result, instr);
2451 }
2452 
2453 
2454 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2455  LOperand* context = UseFixed(instr->context(), esi);
2456  LOperand* left = UseFixed(instr->left(), edx);
2457  LOperand* right = UseFixed(instr->right(), eax);
2458  LStringAdd* string_add = new(zone()) LStringAdd(context, left, right);
2459  return MarkAsCall(DefineFixed(string_add, eax), instr);
2460 }
2461 
2462 
2463 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2464  LOperand* string = UseTempRegister(instr->string());
2465  LOperand* index = UseTempRegister(instr->index());
2466  LOperand* context = UseAny(instr->context());
2467  LStringCharCodeAt* result =
2468  new(zone()) LStringCharCodeAt(context, string, index);
2469  return AssignPointerMap(DefineAsRegister(result));
2470 }
2471 
2472 
2473 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2474  LOperand* char_code = UseRegister(instr->value());
2475  LOperand* context = UseAny(instr->context());
2476  LStringCharFromCode* result =
2477  new(zone()) LStringCharFromCode(context, char_code);
2478  return AssignPointerMap(DefineAsRegister(result));
2479 }
2480 
2481 
2482 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2483  info()->MarkAsDeferredCalling();
2484  LOperand* context = UseAny(instr->context());
2485  LOperand* size = instr->size()->IsConstant()
2486  ? UseConstant(instr->size())
2487  : UseTempRegister(instr->size());
2488  LOperand* temp = TempRegister();
2489  LAllocate* result = new(zone()) LAllocate(context, size, temp);
2490  return AssignPointerMap(DefineAsRegister(result));
2491 }
2492 
2493 
2494 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2495  LOperand* context = UseFixed(instr->context(), esi);
2496  return MarkAsCall(
2497  DefineFixed(new(zone()) LRegExpLiteral(context), eax), instr);
2498 }
2499 
2500 
2501 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2502  LOperand* context = UseFixed(instr->context(), esi);
2503  return MarkAsCall(
2504  DefineFixed(new(zone()) LFunctionLiteral(context), eax), instr);
2505 }
2506 
2507 
2508 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2509  ASSERT(argument_count_ == 0);
2510  allocator_->MarkAsOsrEntry();
2511  current_block_->last_environment()->set_ast_id(instr->ast_id());
2512  return AssignEnvironment(new(zone()) LOsrEntry);
2513 }
2514 
2515 
2516 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2517  LParameter* result = new(zone()) LParameter;
2518  if (instr->kind() == HParameter::STACK_PARAMETER) {
2519  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2520  return DefineAsSpilled(result, spill_index);
2521  } else {
2522  ASSERT(info()->IsStub());
2523  CodeStubInterfaceDescriptor* descriptor =
2524  info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
2525  int index = static_cast<int>(instr->index());
2526  Register reg = descriptor->GetParameterRegister(index);
2527  return DefineFixed(result, reg);
2528  }
2529 }
2530 
2531 
2532 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2533  // Use an index that corresponds to the location in the unoptimized frame,
2534  // which the optimized frame will subsume.
2535  int env_index = instr->index();
2536  int spill_index = 0;
2537  if (instr->environment()->is_parameter_index(env_index)) {
2538  spill_index = chunk()->GetParameterStackSlot(env_index);
2539  } else {
2540  spill_index = env_index - instr->environment()->first_local_index();
2541  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2542  Abort(kNotEnoughSpillSlotsForOsr);
2543  spill_index = 0;
2544  }
2545  if (spill_index == 0) {
2546  // The dynamic frame alignment state overwrites the first local.
2547  // The first local is saved at the end of the unoptimized frame.
2548  spill_index = graph()->osr()->UnoptimizedFrameSlots();
2549  }
2550  }
2551  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2552 }
2553 
2554 
2555 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2556  LOperand* context = UseFixed(instr->context(), esi);
2557  LCallStub* result = new(zone()) LCallStub(context);
2558  return MarkAsCall(DefineFixed(result, eax), instr);
2559 }
2560 
2561 
2562 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2563  // There are no real uses of the arguments object.
2564  // arguments.length and element access are supported directly on
2565  // stack arguments, and any real arguments object use causes a bailout.
2566  // So this value is never used.
2567  return NULL;
2568 }
2569 
2570 
2571 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2572  instr->ReplayEnvironment(current_block_->last_environment());
2573 
2574  // There are no real uses of a captured object.
2575  return NULL;
2576 }
2577 
2578 
2579 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2580  info()->MarkAsRequiresFrame();
2581  LOperand* args = UseRegister(instr->arguments());
2582  LOperand* length;
2583  LOperand* index;
2584  if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2585  length = UseRegisterOrConstant(instr->length());
2586  index = UseOrConstant(instr->index());
2587  } else {
2588  length = UseTempRegister(instr->length());
2589  index = Use(instr->index());
2590  }
2591  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2592 }
2593 
2594 
2595 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2596  LOperand* object = UseFixed(instr->value(), eax);
2597  LToFastProperties* result = new(zone()) LToFastProperties(object);
2598  return MarkAsCall(DefineFixed(result, eax), instr);
2599 }
2600 
2601 
2602 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2603  LOperand* context = UseFixed(instr->context(), esi);
2604  LOperand* value = UseAtStart(instr->value());
2605  LTypeof* result = new(zone()) LTypeof(context, value);
2606  return MarkAsCall(DefineFixed(result, eax), instr);
2607 }
2608 
2609 
2610 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2611  LInstruction* goto_instr = CheckElideControlInstruction(instr);
2612  if (goto_instr != NULL) return goto_instr;
2613  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2614 }
2615 
2616 
2617 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2618  HIsConstructCallAndBranch* instr) {
2619  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2620 }
2621 
2622 
2623 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2624  instr->ReplayEnvironment(current_block_->last_environment());
2625  return NULL;
2626 }
2627 
2628 
2629 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2630  info()->MarkAsDeferredCalling();
2631  if (instr->is_function_entry()) {
2632  LOperand* context = UseFixed(instr->context(), esi);
2633  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2634  } else {
2635  ASSERT(instr->is_backwards_branch());
2636  LOperand* context = UseAny(instr->context());
2637  return AssignEnvironment(
2638  AssignPointerMap(new(zone()) LStackCheck(context)));
2639  }
2640 }
2641 
2642 
2643 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2644  HEnvironment* outer = current_block_->last_environment();
2645  HConstant* undefined = graph()->GetConstantUndefined();
2646  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2647  instr->arguments_count(),
2648  instr->function(),
2649  undefined,
2650  instr->inlining_kind());
2651  // Only replay binding of arguments object if it wasn't removed from graph.
2652  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2653  inner->Bind(instr->arguments_var(), instr->arguments_object());
2654  }
2655  inner->set_entry(instr);
2656  current_block_->UpdateEnvironment(inner);
2657  chunk_->AddInlinedClosure(instr->closure());
2658  return NULL;
2659 }
2660 
2661 
2662 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2663  LInstruction* pop = NULL;
2664 
2665  HEnvironment* env = current_block_->last_environment();
2666 
2667  if (env->entry()->arguments_pushed()) {
2668  int argument_count = env->arguments_environment()->parameter_count();
2669  pop = new(zone()) LDrop(argument_count);
2670  ASSERT(instr->argument_delta() == -argument_count);
2671  }
2672 
2673  HEnvironment* outer = current_block_->last_environment()->
2674  DiscardInlined(false);
2675  current_block_->UpdateEnvironment(outer);
2676  return pop;
2677 }
2678 
2679 
2680 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2681  LOperand* context = UseFixed(instr->context(), esi);
2682  LOperand* object = UseFixed(instr->enumerable(), eax);
2683  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2684  return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
2685 }
2686 
2687 
2688 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2689  LOperand* map = UseRegister(instr->map());
2690  return AssignEnvironment(DefineAsRegister(
2691  new(zone()) LForInCacheArray(map)));
2692 }
2693 
2694 
2695 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2696  LOperand* value = UseRegisterAtStart(instr->value());
2697  LOperand* map = UseRegisterAtStart(instr->map());
2698  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2699 }
2700 
2701 
2702 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2703  LOperand* object = UseRegister(instr->object());
2704  LOperand* index = UseTempRegister(instr->index());
2705  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2706 }
2707 
2708 
2709 } } // namespace v8::internal
2710 
2711 #endif // V8_TARGET_ARCH_IA32