V8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "lithium-allocator-inl.h"
33 #include "ia32/lithium-ia32.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define DEFINE_COMPILE(type) \
40  void L##type::CompileToNative(LCodeGen* generator) { \
41  generator->Do##type(this); \
42  }
44 #undef DEFINE_COMPILE
45 
47  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
48  register_spills_[i] = NULL;
49  }
50  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
51  double_register_spills_[i] = NULL;
52  }
53 }
54 
55 
// Records the stack slot holding the spilled value of the allocatable
// register with the given allocation index. May only be set once.
void LOsrEntry::MarkSpilledRegister(int allocation_index,
                                    LOperand* spill_operand) {
  ASSERT(spill_operand->IsStackSlot());
  ASSERT(register_spills_[allocation_index] == NULL);
  register_spills_[allocation_index] = spill_operand;
}


// Same as above, but for a double register spilled to a double-width slot.
void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
                                          LOperand* spill_operand) {
  ASSERT(spill_operand->IsDoubleStackSlot());
  ASSERT(double_register_spills_[allocation_index] == NULL);
  double_register_spills_[allocation_index] = spill_operand;
}
70 
71 
#ifdef DEBUG
// Debug-only sanity check for instructions marked as calls.
void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  ASSERT(Output() == NULL ||
         LUnallocated::cast(Output())->HasFixedPolicy() ||
         !LUnallocated::cast(Output())->HasRegisterPolicy());
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
  }
}
#endif
92 
93 
// Prints "<mnemonic> <output> <data>", followed by the deopt environment
// and pointer map when present. Used for --trace style debug output.
void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());

  PrintOutputOperandTo(stream);

  PrintDataTo(stream);

  if (HasEnvironment()) {
    stream->Add(" ");
    environment()->PrintTo(stream);
  }

  if (HasPointerMap()) {
    stream->Add(" ");
    pointer_map()->PrintTo(stream);
  }
}


// Default data printer: "= " followed by the space-separated inputs.
// Subclasses override this to print instruction-specific detail.
void LInstruction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  for (int i = 0; i < InputCount(); i++) {
    if (i > 0) stream->Add(" ");
    InputAt(i)->PrintTo(stream);
  }
}


// Prints the result operand, if this instruction defines one.
void LInstruction::PrintOutputOperandTo(StringStream* stream) {
  if (HasResult()) result()->PrintTo(stream);
}
125 
126 
// Prints the label's gap moves, plus a note when the label's block was
// eliminated by MarkEmptyBlocks and redirected to a replacement block.
void LLabel::PrintDataTo(StringStream* stream) {
  LGap::PrintDataTo(stream);
  LLabel* rep = replacement();
  if (rep != NULL) {
    stream->Add(" Dead block replaced with B%d", rep->block_id());
  }
}


// A gap is redundant when none of its four parallel-move positions
// holds a non-redundant move (such a gap emits no code).
bool LGap::IsRedundant() const {
  for (int i = 0; i < 4; i++) {
    if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }

  return true;
}


// Prints the four parallel-move positions as "(...) (...) (...) (...) ";
// empty positions print as "() ".
void LGap::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < 4; i++) {
    stream->Add("(");
    if (parallel_moves_[i] != NULL) {
      parallel_moves_[i]->PrintDataTo(stream);
    }
    stream->Add(") ");
  }
}
156 
157 
// Mnemonic for double (untagged) arithmetic; "-d" suffix marks the
// double variant in trace output.
const char* LArithmeticD::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-d";
    case Token::SUB: return "sub-d";
    case Token::MUL: return "mul-d";
    case Token::DIV: return "div-d";
    case Token::MOD: return "mod-d";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Mnemonic for tagged (generic) arithmetic; "-t" suffix marks the
// tagged variant. Note SHL prints as "sal-t" (x86 naming).
const char* LArithmeticT::Mnemonic() const {
  switch (op()) {
    case Token::ADD: return "add-t";
    case Token::SUB: return "sub-t";
    case Token::MUL: return "mul-t";
    case Token::MOD: return "mod-t";
    case Token::DIV: return "div-t";
    case Token::BIT_AND: return "bit-and-t";
    case Token::BIT_OR: return "bit-or-t";
    case Token::BIT_XOR: return "bit-xor-t";
    case Token::SHL: return "sal-t";
    case Token::SAR: return "sar-t";
    case Token::SHR: return "shr-t";
    default:
      UNREACHABLE();
      return NULL;
  }
}
190 
191 
// --- Debug printers for control-flow instructions. Each prints its
// --- condition (if any) and the true/false successor block ids.

void LGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", block_id());
}


void LBranch::PrintDataTo(StringStream* stream) {
  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
  InputAt(0)->PrintTo(stream);
}


void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" %s ", Token::String(op()));
  InputAt(1)->PrintTo(stream);
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}


// Prints "=== null" / "== undefined" etc. depending on equality kind
// and which nil value is being compared against.
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  InputAt(0)->PrintTo(stream);
  stream->Add(kind() == kStrictEquality ? " === " : " == ");
  stream->Add(nil() == kNullValue ? "null" : "undefined");
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}


void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// Note: inputs 1 and 2 are the compared strings (input 0 is the context).
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  InputAt(1)->PrintTo(stream);
  InputAt(2)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  InputAt(0)->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(),
              false_block_id());
}
279 
280 
281 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
282  stream->Add("if typeof ");
283  InputAt(0)->PrintTo(stream);
284  stream->Add(" == \"%s\" then B%d else B%d",
285  *hydrogen()->type_literal()->ToCString(),
287 }
288 
289 
// --- Debug printers for call, context-slot and argument instructions.
// --- "#%d" is the call arity.

void LCallConstantFunction::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}


void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
  stream->Add("/%s ", hydrogen()->OpName());
  InputAt(0)->PrintTo(stream);
}


void LMathPowHalf::PrintDataTo(StringStream* stream) {
  stream->Add("/pow_half ");
  InputAt(0)->PrintTo(stream);
}


void LLoadContextSlot::PrintDataTo(StringStream* stream) {
  InputAt(0)->PrintTo(stream);
  stream->Add("[%d]", slot_index());
}


void LStoreContextSlot::PrintDataTo(StringStream* stream) {
  InputAt(0)->PrintTo(stream);
  stream->Add("[%d] <- ", slot_index());
  InputAt(1)->PrintTo(stream);
}


void LInvokeFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" ");
  InputAt(1)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


// The key for a keyed call is fixed in ecx on ia32.
void LCallKeyed::PrintDataTo(StringStream* stream) {
  stream->Add("[ecx] #%d / ", arity());
}


void LCallNamed::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


void LCallGlobal::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}


void LCallNew::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


// Prints "<arguments> length <length> index <index>".
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);

  stream->Add(" length ");
  length()->PrintTo(stream);

  stream->Add(" index ");
  index()->PrintTo(stream);
}
367 
368 
// Allocates the next spill slot index. Double-width slots consume two
// indexes and are aligned by rounding the count up to an odd value
// before the post-increment, so the returned index is even-aligned.
int LChunk::GetNextSpillIndex(bool is_double) {
  // Skip a slot if necessary for a double-width slot.
  if (is_double) {
    spill_slot_count_++;
    spill_slot_count_ |= 1;
    num_double_slots_++;
  }
  return spill_slot_count_++;
}
378 
379 
380 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
381  int index = GetNextSpillIndex(is_double);
382  if (is_double) {
383  return LDoubleStackSlot::Create(index, zone());
384  } else {
385  return LStackSlot::Create(index, zone());
386  }
387 }
388 
389 
391  HPhase phase("L_Mark empty blocks", this);
392  for (int i = 0; i < graph()->blocks()->length(); ++i) {
393  HBasicBlock* block = graph()->blocks()->at(i);
394  int first = block->first_instruction_index();
395  int last = block->last_instruction_index();
396  LInstruction* first_instr = instructions()->at(first);
397  LInstruction* last_instr = instructions()->at(last);
398 
399  LLabel* label = LLabel::cast(first_instr);
400  if (last_instr->IsGoto()) {
401  LGoto* goto_instr = LGoto::cast(last_instr);
402  if (label->IsRedundant() &&
403  !label->is_loop_header()) {
404  bool can_eliminate = true;
405  for (int i = first + 1; i < last && can_eliminate; ++i) {
406  LInstruction* cur = instructions()->at(i);
407  if (cur->IsGap()) {
408  LGap* gap = LGap::cast(cur);
409  if (!gap->IsRedundant()) {
410  can_eliminate = false;
411  }
412  } else {
413  can_eliminate = false;
414  }
415  }
416 
417  if (can_eliminate) {
418  label->set_replacement(GetLabel(goto_instr->block_id()));
419  }
420  }
421  }
422  }
423 }
424 
425 
// --- Debug printers for store/transition instructions; all print
// --- "<target> <- <value>" style output.

void LStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}


void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}


void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
  elements()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


// Prints the map pointers involved in the elements-kind transition.
void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
}
475 
476 
// Appends an instruction together with its companion gap. For control
// instructions the gap precedes the instruction (control must end the
// block); otherwise the gap follows. `index` records where the pointer
// map attaches: the gap position for control instructions, the
// instruction position otherwise.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
494 
495 
// Wraps a hydrogen constant in a constant operand keyed by its value id.
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->scope()->num_parameters() - 1;
  ASSERT(result < 0);
  return result;
}

// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  ASSERT(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


// Returns the gap at `index`; asserts (via cast) that it is a gap.
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


// Walks backwards from `index` to the closest gap position.
// Relies on every instruction having a companion gap (AddInstruction).
int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}
533 
534 
535 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
537  LGap::START, zone())->AddMove(from, to, zone());
538 }
539 
540 
541 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
542  return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
543 }
544 
545 
547  LConstantOperand* operand) const {
548  return graph_->LookupValue(operand->index())->representation();
549 }
550 
551 
// Translates the hydrogen graph into a lithium chunk, block by block.
// Returns NULL when building aborts (see Abort below).
LChunk* LChunkBuilder::Build() {
  ASSERT(is_unused());
  chunk_ = new(zone()) LChunk(info(), graph());
  HPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;

  // Reserve the first spill slot for the state of dynamic alignment.
  int alignment_state_index = chunk_->GetNextSpillIndex(false);
  ASSERT_EQ(alignment_state_index, 0);
  USE(alignment_state_index);

  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    // The next block is passed so DoBasicBlock can detect fall-through.
    HBasicBlock* next = NULL;
    if (i < blocks->length() - 1) next = blocks->at(i + 1);
    DoBasicBlock(blocks->at(i), next);
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}
573 
574 
// Marks the build as aborted; optionally traces the printf-style reason
// when --trace-bailout is enabled. Build() checks is_aborted() after
// each block.
void LChunkBuilder::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LChunk building in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
588 
589 
590 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
591  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
593 }
594 
595 
596 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
597  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
599 }
600 
601 
// Use `value` in a specific general-purpose register.
LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}


// Use `value` in a specific XMM register.
LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
  return Use(value, ToUnallocated(reg));
}


// Use `value` in any register, chosen by the allocator.
LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
615 
616 
617 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
618  return Use(value,
619  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
621 }
622 
623 
624 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
625  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
626 }
627 
628 
629 LOperand* LChunkBuilder::Use(HValue* value) {
630  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
631 }
632 
633 
634 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
635  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
637 }
638 
639 
// --- Use* variants that fold hydrogen constants into constant operands
// --- instead of allocating a location for them.

LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value);
}


LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseAtStart(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegister(value);
}


LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegisterAtStart(value);
}


// ANY policy: register, stack slot or constant — allocator's choice.
LOperand* LChunkBuilder::UseAny(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}


// Common tail of all Use* helpers: lazily emits values marked
// EmitAtUses, then tags the operand with the value's virtual register.
LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}
683 
684 
// Attaches `result` as the instruction's output operand, tagged with the
// current hydrogen instruction's virtual register.
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}


// Result must be in some register.
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


// Result goes to a fixed stack slot.
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateInstruction<1, I, T>* instr,
    int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}


// Result shares the location of the first input (two-address form,
// common on ia32).
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}


// Result is in a specific general-purpose register.
template<int I, int T>
LInstruction* LChunkBuilder::DefineFixed(LTemplateInstruction<1, I, T>* instr,
                                         Register reg) {
  return Define(instr, ToUnallocated(reg));
}


// Result is in a specific XMM register.
template<int I, int T>
LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateInstruction<1, I, T>* instr,
    XMMRegister reg) {
  return Define(instr, ToUnallocated(reg));
}
732 
733 
// Attaches a deoptimization environment built from the current block's
// last hydrogen environment.
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator));
  return instr;
}
741 
742 
// Marks `instr` as a call: verifies its operand policies (debug only),
// gives it a pointer map, records any pending lazy-deopt simulate, and
// attaches a deopt environment when required.
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  if (hinstr->HasObservableSideEffects()) {
    // The following simulate carries the ast id to deoptimize to after
    // the call; remember it until the simulate itself is processed.
    ASSERT(hinstr->next()->IsSimulate());
    HSimulate* sim = HSimulate::cast(hinstr->next());
    ASSERT(instruction_pending_deoptimization_environment_ == NULL);
    ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
    instruction_pending_deoptimization_environment_ = instr;
    pending_deoptimization_ast_id_ = sim->ast_id();
  }

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}
774 
775 
// Gives the instruction an (initially empty) pointer map for GC; must
// not already have one.
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
  return instr;
}


// Fresh temporary register operand with its own virtual register.
// Aborts the build when the allocator runs out of virtual registers.
LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
  operand->set_virtual_register(allocator_->GetVirtualRegister());
  if (!allocator_->AllocationOk()) {
    Abort("Not enough virtual registers (temps).");
  }
  return operand;
}


// Temporary pinned to a specific general-purpose register.
LOperand* LChunkBuilder::FixedTemp(Register reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}


// Temporary pinned to a specific XMM register.
LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}
806 
807 
// A hydrogen block entry becomes a lithium label.
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  return new(zone()) LLabel(instr->block());
}


// Both soft and hard deoptimize lower to LDeoptimize with an attached
// deopt environment.
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}


LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}
821 
822 
// Lowers a shift. Tagged operands go through the generic LArithmeticT
// call (ia32 calling convention: context in esi, operands in edx/eax,
// result in eax). Integer shifts take the left operand in a register
// and the amount either as a constant (masked to 5 bits) or fixed in
// ecx, as the x86 shift instructions require.
LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsTagged()) {
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    LOperand* context = UseFixed(instr->context(), esi);
    LOperand* left = UseFixed(instr->left(), edx);
    LOperand* right = UseFixed(instr->right(), eax);
    LArithmeticT* result = new(zone()) LArithmeticT(op, context, left, right);
    return MarkAsCall(DefineFixed(result, eax), instr);
  }

  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().IsInteger32());
  ASSERT(instr->right()->representation().IsInteger32());
  LOperand* left = UseRegisterAtStart(instr->left());

  HValue* right_value = instr->right();
  LOperand* right = NULL;
  int constant_value = 0;
  if (right_value->IsConstant()) {
    HConstant* constant = HConstant::cast(right_value);
    right = chunk_->DefineConstantOperand(constant);
    constant_value = constant->Integer32Value() & 0x1f;
  } else {
    right = UseFixed(right_value, ecx);
  }

  // Shift operations can only deoptimize if we do a logical shift by 0 and
  // the result cannot be truncated to int32.
  bool may_deopt = (op == Token::SHR && constant_value == 0);
  bool does_deopt = false;
  if (may_deopt) {
    for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
      if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
        does_deopt = true;
        break;
      }
    }
  }

  LInstruction* result =
      DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
  return does_deopt ? AssignEnvironment(result) : result;
}
869 
870 
// Double arithmetic: both operands in registers, result reuses the left
// operand's register (two-address SSE form). MOD is handled elsewhere.
LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->left()->representation().IsDouble());
  ASSERT(instr->right()->representation().IsDouble());
  ASSERT(op != Token::MOD);
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
  return DefineSameAsFirst(result);
}


// Tagged arithmetic: generic runtime/stub call with the ia32 fixed
// convention (context esi, left edx, right eax, result eax).
LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(op == Token::ADD ||
         op == Token::DIV ||
         op == Token::MOD ||
         op == Token::MUL ||
         op == Token::SUB);
  HValue* left = instr->left();
  HValue* right = instr->right();
  ASSERT(left->representation().IsTagged());
  ASSERT(right->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* left_operand = UseFixed(left, edx);
  LOperand* right_operand = UseFixed(right, eax);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, context, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
902 
903 
// Lowers one hydrogen block: sets up its environment (fresh for the
// start block, inherited from the predecessor, or joined via phis),
// then visits each instruction and records the block's lithium
// instruction range and outgoing argument count.
void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
  ASSERT(is_building());
  current_block_ = block;
  next_block_ = next_block;
  if (block->IsStartBlock()) {
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);
    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
          pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      last_environment->SetValueAt(phi->merged_index(), phi);
    }
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      last_environment->SetValueAt(block->deleted_phis()->at(i),
                                   graph_->GetConstantUndefined());
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while (current != NULL && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  next_block_ = NULL;
  current_block_ = NULL;
}
965 
966 
// Lowers a single hydrogen instruction and appends the result to the
// chunk. Saves/restores current_instruction_ because lowering may
// recurse (Use() visits EmitAtUses values).
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;
  if (current->has_position()) position_ = current->position();
  LInstruction* instr = current->CompileToLithium(this);

  if (instr != NULL) {
    // Stress flags force extra pointer maps / environments for testing.
    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    instr->set_hydrogen_value(current);
    chunk_->AddInstruction(instr, current_block_);
  }
  current_instruction_ = old_current;
}
985 
986 
// Recursively builds the lithium deopt environment chain from the
// hydrogen environment chain (outermost first). The accumulator numbers
// pushed arguments across the chain; it only advances past JS_FUNCTION
// frames.
LEnvironment* LChunkBuilder::CreateEnvironment(
    HEnvironment* hydrogen_env,
    int* argument_index_accumulator) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
  int ast_id = hydrogen_env->ast_id();
  ASSERT(ast_id != AstNode::kNoNumber ||
         hydrogen_env->frame_type() != JS_FUNCTION);
  int value_count = hydrogen_env->length();
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               zone());
  int argument_index = *argument_index_accumulator;
  for (int i = 0; i < value_count; ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    LOperand* op = NULL;
    if (value->IsArgumentsObject()) {
      // Materialized lazily at deopt time; no operand recorded.
      op = NULL;
    } else if (value->IsPushArgument()) {
      op = new(zone()) LArgument(argument_index++);
    } else {
      op = UseAny(value);
    }
    result->AddValue(op, value->representation());
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
1029 
1030 
1031 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1032  return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1033 }
1034 
1035 
// Lower a conditional branch. Constant conditions emitted at their uses fold
// directly into an unconditional LGoto; cheap representations branch without
// an environment; the general tagged case needs a ToBoolean-style test that
// can deoptimize and may need a temp register for map access.
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  HValue* value = instr->value();
  if (value->EmitAtUses()) {
    ASSERT(value->IsConstant());
    ASSERT(!value->representation().IsDouble());
    HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
        ? instr->FirstSuccessor()
        : instr->SecondSuccessor();
    return new(zone()) LGoto(successor->block_id());
  }

  // Untagged integers or doubles, smis and booleans don't require a
  // deoptimization environment nor a temp register.
  Representation rep = value->representation();
  HType type = value->type();
  if (!rep.IsTagged() || type.IsSmi() || type.IsBoolean()) {
    return new(zone()) LBranch(UseRegister(value), NULL);
  }

  ToBooleanStub::Types expected = instr->expected_input_types();
  // We need a temporary register when we have to access the map *or* we have
  // no type info yet, in which case we handle all cases (including the ones
  // involving maps).
  bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
  LOperand* temp = needs_temp ? TempRegister() : NULL;
  return AssignEnvironment(new(zone()) LBranch(UseRegister(value), temp));
}
1063 
1064 
// Lower a map comparison used for branching; the tagged object is read at
// the start of the instruction since the compare does not clobber it.
LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LCmpMapAndBranch(value);
}
1070 
1071 
// Lower a read of the arguments count; result lands in a fresh register.
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
  return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
}
1075 
1076 
// Lower materialization of a pointer to the arguments area; takes no inputs.
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
  return DefineAsRegister(new(zone()) LArgumentsElements);
}
1080 
1081 
// Lower `instanceof` as a stub call: operands go in the registers the
// InstanceofStub expects, context in esi, result fixed in eax.
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
  LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
  LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
  LOperand* context = UseFixed(instr->context(), esi);
  LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1089 
1090 
// Lower `instanceof` against a known global function; edi is reserved as a
// fixed temp for the stub, result is fixed in eax.
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
    HInstanceOfKnownGlobal* instr) {
  LInstanceOfKnownGlobal* result =
      new(zone()) LInstanceOfKnownGlobal(
          UseFixed(instr->context(), esi),
          UseFixed(instr->left(), InstanceofStub::left()),
          FixedTemp(edi));
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1100 
1101 
// Lower receiver wrapping (replacing undefined/null receivers with the
// global receiver for non-strict functions). Result aliases the receiver
// register; an environment is attached because the operation can deopt.
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
  LOperand* receiver = UseRegister(instr->receiver());
  LOperand* function = UseRegisterAtStart(instr->function());
  LOperand* temp = TempRegister();
  LWrapReceiver* result =
      new(zone()) LWrapReceiver(receiver, function, temp);
  return AssignEnvironment(DefineSameAsFirst(result));
}
1110 
1111 
// Lower Function.prototype.apply with an arguments object. All four inputs
// are pinned to the registers the generated code expects; this call site can
// deoptimize eagerly (before the call completes).
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
  LOperand* function = UseFixed(instr->function(), edi);
  LOperand* receiver = UseFixed(instr->receiver(), eax);
  LOperand* length = UseFixed(instr->length(), ebx);
  LOperand* elements = UseFixed(instr->elements(), ecx);
  LApplyArguments* result = new(zone()) LApplyArguments(function,
                                                        receiver,
                                                        length,
                                                        elements);
  return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
1123 
1124 
// Lower an argument push. Increments the builder's running argument count,
// which the matching call lowering later subtracts.
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
  ++argument_count_;
  LOperand* argument = UseAny(instr->argument());
  return new(zone()) LPushArgument(argument);
}
1130 
1131 
1132 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1133  return instr->HasNoUses()
1134  ? NULL
1135  : DefineAsRegister(new(zone()) LThisFunction);
1136 }
1137 
1138 
1139 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1140  return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
1141 }
1142 
1143 
// Lower a load of the enclosing (outer) context from the given context.
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LOuterContext(context));
}
1148 
1149 
// Lower global declarations as a runtime call; context fixed in esi.
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
}
1154 
1155 
// Lower a load of the global object from the context.
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LGlobalObject(context));
}
1160 
1161 
// Lower a load of the global receiver from the global object.
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
  LOperand* global_object = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
1166 
1167 
// Lower a direct call to a known constant function. The pushed-argument
// count accumulated by DoPushArgument is consumed here; result in eax.
LInstruction* LChunkBuilder::DoCallConstantFunction(
    HCallConstantFunction* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, eax), instr);
}
1173 
1174 
// Lower an invoke of a function value: callee fixed in edi, context in esi,
// result in eax. Cannot deoptimize eagerly (only at the call's lazy point).
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* function = UseFixed(instr->function(), edi);
  argument_count_ -= instr->argument_count();
  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
  return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
1182 
1183 
// Lower the Math builtins. Three strategies:
//  - log: inline code, result aliases the input register;
//  - sin/cos/tan: transcendental cache call, operand/result fixed in xmm1;
//  - abs/floor/round/sqrt/pow(0.5): inline code with per-op result and
//    environment requirements (abs/floor/round may deoptimize).
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  BuiltinFunctionId op = instr->op();
  if (op == kMathLog) {
    ASSERT(instr->representation().IsDouble());
    ASSERT(instr->value()->representation().IsDouble());
    LOperand* context = UseAny(instr->context());  // Not actually used.
    LOperand* input = UseRegisterAtStart(instr->value());
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
                                                                  input);
    return DefineSameAsFirst(result);
  } else if (op == kMathSin || op == kMathCos || op == kMathTan) {
    LOperand* context = UseFixed(instr->context(), esi);
    LOperand* input = UseFixedDouble(instr->value(), xmm1);
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
                                                                  input);
    return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
  } else {
    LOperand* input = UseRegisterAtStart(instr->value());
    LOperand* context = UseAny(instr->context());  // Deferred use by MathAbs.
    if (op == kMathPowHalf) {
      LOperand* temp = TempRegister();
      LMathPowHalf* result = new(zone()) LMathPowHalf(context, input, temp);
      return DefineSameAsFirst(result);
    }
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
                                                                  input);
    switch (op) {
      case kMathAbs:
        // Abs on tagged values may allocate (hence pointer map) and deopt.
        return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
      case kMathFloor:
        return AssignEnvironment(DefineAsRegister(result));
      case kMathRound:
        return AssignEnvironment(DefineAsRegister(result));
      case kMathSqrt:
        return DefineSameAsFirst(result);
      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
1225 
1226 
// Lower a keyed call (obj[key](...)): key fixed in ecx, result in eax.
LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
  ASSERT(instr->key()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* key = UseFixed(instr->key(), ecx);
  argument_count_ -= instr->argument_count();
  LCallKeyed* result = new(zone()) LCallKeyed(context, key);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1235 
1236 
// Lower a named call (obj.name(...)); uses the CallIC, result in eax.
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  argument_count_ -= instr->argument_count();
  LCallNamed* result = new(zone()) LCallNamed(context);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1243 
1244 
// Lower a call to a global variable; result fixed in eax.
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  argument_count_ -= instr->argument_count();
  LCallGlobal* result = new(zone()) LCallGlobal(context);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1251 
1252 
// Lower a call to a known global function; result fixed in eax.
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
  argument_count_ -= instr->argument_count();
  return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, eax), instr);
}
1257 
1258 
// Lower `new` expressions: constructor fixed in edi, result in eax.
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* constructor = UseFixed(instr->constructor(), edi);
  argument_count_ -= instr->argument_count();
  LCallNew* result = new(zone()) LCallNew(context, constructor);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1266 
1267 
// Lower a call of an arbitrary function value: callee in edi, result in eax.
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* function = UseFixed(instr->function(), edi);
  argument_count_ -= instr->argument_count();
  LCallFunction* result = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1275 
1276 
// Lower a call into the V8 runtime; result fixed in eax.
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  argument_count_ -= instr->argument_count();
  LOperand* context = UseFixed(instr->context(), esi);
  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), eax), instr);
}
1282 
1283 
// Logical right shift; delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}
1287 
1288 
// Arithmetic right shift; delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}
1292 
1293 
// Left shift; delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}
1297 
1298 
// Lower bitwise and/or/xor. Integer operands get an inline LBitI whose
// result aliases the left operand; tagged operands fall back to the generic
// binary-op stub with its edx/eax calling convention.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    // Put the non-constant operand on the left so the constant can be an
    // immediate on the right.
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
    return DefineSameAsFirst(new(zone()) LBitI(left, right));
  } else {
    ASSERT(instr->representation().IsTagged());
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    LOperand* context = UseFixed(instr->context(), esi);
    LOperand* left = UseFixed(instr->left(), edx);
    LOperand* right = UseFixed(instr->right(), eax);
    LArithmeticT* result =
        new(zone()) LArithmeticT(instr->op(), context, left, right);
    return MarkAsCall(DefineFixed(result, eax), instr);
  }
}
1320 
1321 
// Lower integer bitwise NOT. Elided entirely when unused; otherwise the
// result aliases the input register.
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
  ASSERT(instr->value()->representation().IsInteger32());
  ASSERT(instr->representation().IsInteger32());
  if (instr->HasNoUses()) return NULL;
  LOperand* input = UseRegisterAtStart(instr->value());
  LBitNotI* result = new(zone()) LBitNotI(input);
  return DefineSameAsFirst(result);
}
1330 
1331 
// Lower division. Doubles use divsd; integers use the ia32 idiv instruction,
// which requires the dividend in eax and clobbers edx, hence the fixed
// registers and the edx temp; tagged values go through the generic stub.
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else if (instr->representation().IsInteger32()) {
    // The temporary operand is necessary to ensure that right is not allocated
    // into edx.
    LOperand* temp = FixedTemp(edx);
    LOperand* dividend = UseFixed(instr->left(), eax);
    LOperand* divisor = UseRegister(instr->right());
    LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
    // Integer division can deoptimize (e.g. division by zero, remainder).
    return AssignEnvironment(DefineFixed(result, eax));
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::DIV, instr);
  }
}
1348 
1349 
// Flooring division is not implemented on ia32 in this version.
LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  UNIMPLEMENTED();
  return NULL;
}
1354 
1355 
// Lower modulo. Integer mod by a power of two uses masking in place; the
// general integer case uses idiv (eax dividend, remainder in edx); double
// mod calls out to a C function with fixed xmm registers.
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LInstruction* result;
    if (instr->HasPowerOf2Divisor()) {
      ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
      LOperand* value = UseRegisterAtStart(instr->left());
      LModI* mod =
          new(zone()) LModI(value, UseOrConstant(instr->right()), NULL);
      result = DefineSameAsFirst(mod);
    } else {
      // The temporary operand is necessary to ensure that right is
      // not allocated into edx.
      LOperand* temp = FixedTemp(edx);
      LOperand* value = UseFixed(instr->left(), eax);
      LOperand* divisor = UseRegister(instr->right());
      LModI* mod = new(zone()) LModI(value, divisor, temp);
      result = DefineFixed(mod, edx);
    }

    // Need a deopt environment only when the result could be -0 or the
    // divisor could be zero.
    return (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
            instr->CheckFlag(HValue::kCanBeDivByZero))
        ? AssignEnvironment(result)
        : result;
  } else if (instr->representation().IsTagged()) {
    return DoArithmeticT(Token::MOD, instr);
  } else {
    ASSERT(instr->representation().IsDouble());
    // We call a C function for double modulo. It can't trigger a GC.
    // We need to use fixed result register for the call.
    // TODO(fschneider): Allow any register as input registers.
    LOperand* left = UseFixedDouble(instr->left(), xmm2);
    LOperand* right = UseFixedDouble(instr->right(), xmm1);
    LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
    return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
  }
}
1395 
1396 
// Lower multiplication. Integer mul needs a temp register only for the
// minus-zero check, and an environment when it can overflow or produce -0.
// Note: AssignEnvironment mutates `mul` in place, so the discarded return
// value here is intentional.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstant(instr->MostConstantOperand());
    LOperand* temp = NULL;
    if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
      temp = TempRegister();
    }
    LMulI* mul = new(zone()) LMulI(left, right, temp);
    if (instr->CheckFlag(HValue::kCanOverflow) ||
        instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
      AssignEnvironment(mul);
    }
    return DefineSameAsFirst(mul);
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::MUL, instr);
  }
}
1420 
1421 
// Lower subtraction: two-address integer sub aliasing the left operand,
// with a deopt environment when overflow is possible; doubles and tagged
// values delegate to the shared arithmetic lowerings.
LInstruction* LChunkBuilder::DoSub(HSub* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    LSubI* sub = new(zone()) LSubI(left, right);
    LInstruction* result = DefineSameAsFirst(sub);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::SUB, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::SUB, instr);
  }
}
1441 
1442 
// Lower addition. Being commutative, the non-constant operand is placed on
// the left so a constant right operand can be encoded as an immediate;
// otherwise mirrors DoSub.
LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
    LAddI* add = new(zone()) LAddI(left, right);
    LInstruction* result = DefineSameAsFirst(add);
    if (instr->CheckFlag(HValue::kCanOverflow)) {
      result = AssignEnvironment(result);
    }
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::ADD, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::ADD, instr);
  }
}
1462 
1463 
// Lower Math.pow as a C call with fixed registers; the exponent may be a
// double (xmm1) or a tagged/integer value (eax). Can deoptimize eagerly.
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), xmm2);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), xmm1) :
      UseFixed(instr->right(), eax);
  LPower* result = new(zone()) LPower(left, right);
  return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
1478 
1479 
// Lower Math.random as a call; global object fixed in eax, double result
// fixed in xmm1.
LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->global_object()->representation().IsTagged());
  LOperand* global_object = UseFixed(instr->global_object(), eax);
  LRandom* result = new(zone()) LRandom(global_object);
  return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
1487 
1488 
// Lower a generic (tagged) comparison as a stub call with the edx/eax
// calling convention; result fixed in eax.
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* left = UseFixed(instr->left(), edx);
  LOperand* right = UseFixed(instr->right(), eax);
  LCmpT* result = new(zone()) LCmpT(context, left, right);
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1498 
1499 
// Lower a typed compare-and-branch. Integer comparisons may take constants
// on either side; double comparisons need registers unless both sides are
// constants.
LInstruction* LChunkBuilder::DoCompareIDAndBranch(
    HCompareIDAndBranch* instr) {
  Representation r = instr->GetInputRepresentation();
  if (r.IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterOrConstantAtStart(instr->left());
    LOperand* right = UseOrConstantAtStart(instr->right());
    return new(zone()) LCmpIDAndBranch(left, right);
  } else {
    ASSERT(r.IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    LOperand* left;
    LOperand* right;
    if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
      left = UseRegisterOrConstantAtStart(instr->left());
      right = UseRegisterOrConstantAtStart(instr->right());
    } else {
      left = UseRegisterAtStart(instr->left());
      right = UseRegisterAtStart(instr->right());
    }
    return new(zone()) LCmpIDAndBranch(left, right);
  }
}
1525 
1526 
// Lower an object-identity compare-and-branch; right side may live anywhere
// since only a comparison against it is performed.
LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
    HCompareObjectEqAndBranch* instr) {
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseAtStart(instr->right());
  return new(zone()) LCmpObjectEqAndBranch(left, right);
}
1533 
1534 
// Lower comparison of a value against a known constant with branch.
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
    HCompareConstantEqAndBranch* instr) {
  return new(zone()) LCmpConstantEqAndBranch(
      UseRegisterAtStart(instr->value()));
}
1540 
1541 
// Lower a null/undefined test with branch.
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
  // We only need a temp register for non-strict compare.
  LOperand* temp = instr->kind() == kStrictEquality ? NULL : TempRegister();
  return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
}
1547 
1548 
// Lower an is-object test with branch; the temp holds the value's map.
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsObjectAndBranch(UseRegister(instr->value()), temp);
}
1554 
1555 
// Lower an is-string test with branch; needs a temp for the type check.
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* temp = TempRegister();
  return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
}
1561 
1562 
// Lower a smi test with branch; the value can live anywhere (Use, not
// UseRegister) since only its tag bit is inspected.
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
1567 
1568 
1569 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1570  HIsUndetectableAndBranch* instr) {
1571  ASSERT(instr ->value()->representation().IsTagged());
1572  return new(zone()) LIsUndetectableAndBranch(
1573  UseRegisterAtStart(instr->value()), TempRegister());
1574 }
1575 
1576 
// Lower a string comparison with branch as a stub call (edx/eax operands).
LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* left = UseFixed(instr->left(), edx);
  LOperand* right = UseFixed(instr->right(), eax);

  LStringCompareAndBranch* result = new(zone())
      LStringCompareAndBranch(context, left, right);

  return MarkAsCall(result, instr);
}
1590 
1591 
// Lower an instance-type range test with branch; temp holds the map.
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LHasInstanceTypeAndBranch(
      UseRegisterAtStart(instr->value()),
      TempRegister());
}
1599 
1600 
// Lower extraction of a string's cached array index into a fresh register.
LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
    HGetCachedArrayIndex* instr)  {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());

  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
1608 
1609 
// Lower a has-cached-array-index test with branch.
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
    HHasCachedArrayIndexAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LHasCachedArrayIndexAndBranch(
      UseRegisterAtStart(instr->value()));
}
1616 
1617 
// Lower a class-name test with branch; needs two temps for the map walk.
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
                                           TempRegister(),
                                           TempRegister());
}
1625 
1626 
// Lower a JSArray length load into a fresh register.
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
  LOperand* array = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
1631 
1632 
// Lower a FixedArrayBase length load into a fresh register.
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
    HFixedArrayBaseLength* instr) {
  LOperand* array = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
1638 
1639 
// Lower extraction of an object's elements kind into a fresh register.
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
  LOperand* object = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LElementsKind(object));
}
1644 
1645 
// Lower JSValue unwrapping (valueOf); result aliases the object register.
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
  LOperand* object = UseRegister(instr->value());
  LValueOf* result = new(zone()) LValueOf(object, TempRegister());
  return DefineSameAsFirst(result);
}
1651 
1652 
// Lower a Date field read; may call into the runtime, hence MarkAsCall with
// fixed registers (date/result in eax, ecx temp).
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
  LOperand* date = UseFixed(instr->value(), eax);
  LDateField* result =
      new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
  return MarkAsCall(DefineFixed(result, eax), instr);
}
1659 
1660 
// Lower an array bounds check; deoptimizes (environment) when out of range.
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
  return AssignEnvironment(new(zone()) LBoundsCheck(
      UseRegisterOrConstantAtStart(instr->index()),
      UseAtStart(instr->length())));
}
1666 
1667 
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  return NULL;
}
1673 
1674 
// Lower `throw` as a runtime call; value fixed in eax, context in esi.
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* value = UseFixed(instr->value(), eax);
  return MarkAsCall(new(zone()) LThrow(context, value), instr);
}
1680 
1681 
// HUseConst only keeps a constant alive in hydrogen; no lithium code needed.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}
1685 
1686 
LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}
1693 
1694 
// Lower a representation change (HChange) between tagged, double and int32.
// Each of the six from/to combinations selects a dedicated lithium
// instruction, with environments attached wherever the conversion can
// deoptimize and pointer maps wherever it can allocate.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      LOperand* value = UseRegister(instr->value());
      // Temp register only necessary for minus zero check.
      LOperand* temp = instr->deoptimize_on_minus_zero()
                       ? TempRegister()
                       : NULL;
      LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
      return AssignEnvironment(DefineAsRegister(res));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      if (instr->value()->type().IsSmi()) {
        // Known smi: just shift the tag out, no deopt possible.
        return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
      } else {
        bool truncating = instr->CanTruncateToInt32();
        // Truncation on SSE3 can use fisttp without an xmm scratch register.
        LOperand* xmm_temp =
            (truncating && CpuFeatures::IsSupported(SSE3))
            ? NULL
            : FixedTemp(xmm1);
        LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
        return AssignEnvironment(DefineSameAsFirst(res));
      }
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(instr->value());
      LOperand* temp = TempRegister();

      // Make sure that temp and result_temp are different registers.
      LUnallocated* result_temp = TempRegister();
      // Boxing a double allocates a heap number, hence the pointer map.
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
      return AssignPointerMap(Define(result, result_temp));
    } else {
      ASSERT(to.IsInteger32());
      bool truncating = instr->CanTruncateToInt32();
      bool needs_temp = truncating && !CpuFeatures::IsSupported(SSE3);
      LOperand* value = needs_temp ?
          UseTempRegister(instr->value()) : UseRegister(instr->value());
      LOperand* temp = needs_temp ? TempRegister() : NULL;
      return AssignEnvironment(
          DefineAsRegister(new(zone()) LDoubleToI(value, temp)));
    }
  } else if (from.IsInteger32()) {
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      if (val->HasRange() && val->range()->IsInSmiRange()) {
        // Fits in a smi: tagging is just a shift, cannot fail.
        return DefineSameAsFirst(new(zone()) LSmiTag(value));
      } else {
        // May need a heap number: pointer map for allocation, environment
        // for the deopt on allocation failure path.
        LNumberTagI* result = new(zone()) LNumberTagI(value);
        return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
      }
    } else {
      ASSERT(to.IsDouble());
      return DefineAsRegister(
          new(zone()) LInteger32ToDouble(Use(instr->value())));
    }
  }
  UNREACHABLE();
  return NULL;
}
1760 
1761 
// Lower a non-smi check; deoptimizes when the value is a smi.
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
  LOperand* value = UseAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
1766 
1767 
// Lower an instance-type check (deopts on mismatch); temp holds the map.
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  LCheckInstanceType* result = new(zone()) LCheckInstanceType(value, temp);
  return AssignEnvironment(result);
}
1774 
1775 
// Lower a prototype-chain map check (deopts on mismatch); only a temp is
// needed since the prototypes themselves are compile-time constants.
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
  LOperand* temp = TempRegister();
  LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
  return AssignEnvironment(result);
}
1781 
1782 
// Lower a smi check; deoptimizes when the value is not a smi.
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  LOperand* value = UseAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckSmi(value));
}
1787 
1788 
// Lower a check that a value is a specific function (deopts otherwise).
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
  // If the target is in new space, we'll emit a global cell compare and so
  // want the value in a register. If the target gets promoted before we
  // emit code, we will still get the register but will do an immediate
  // compare instead of the cell compare. This is safe.
  LOperand* value = Isolate::Current()->heap()->InNewSpace(*instr->target())
      ? UseRegisterAtStart(instr->value())
      : UseAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckFunction(value));
}
1799 
1800 
// Lower a map check (deopts when the object's map is not in the set).
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LCheckMaps* result = new(zone()) LCheckMaps(value);
  return AssignEnvironment(result);
}
1806 
1807 
// Lower clamping of a value to the uint8 range [0, 255], as used by typed
// (pixel) array stores. Three variants by input representation; only the
// tagged variant can deoptimize (on non-number input).
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  if (input_rep.IsDouble()) {
    LOperand* reg = UseRegister(value);
    return DefineAsRegister(new(zone()) LClampDToUint8(reg));
  } else if (input_rep.IsInteger32()) {
    LOperand* reg = UseFixed(value, eax);
    return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
  } else {
    ASSERT(input_rep.IsTagged());
    LOperand* reg = UseFixed(value, eax);
    // Register allocator doesn't (yet) support allocation of double
    // temps. Reserve xmm1 explicitly.
    LOperand* temp = FixedTemp(xmm1);
    LClampTToUint8* result = new(zone()) LClampTToUint8(reg, temp);
    return AssignEnvironment(DefineFixed(result, eax));
  }
}
1827 
1828 
// Lower a function return; the return value is fixed in eax per the ia32
// JavaScript calling convention.
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
  return new(zone()) LReturn(UseFixed(instr->value(), eax));
}
1832 
1833 
// Lower a constant by representation. Double constants need a temp register
// unless their bit pattern is zero (which can be produced with xorps).
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsInteger32()) {
    return DefineAsRegister(new(zone()) LConstantI);
  } else if (r.IsDouble()) {
    double value = instr->DoubleValue();
    LOperand* temp = (BitCast<uint64_t, double>(value) != 0)
        ? TempRegister()
        : NULL;
    return DefineAsRegister(new(zone()) LConstantD(temp));
  } else if (r.IsTagged()) {
    return DefineAsRegister(new(zone()) LConstantT);
  } else {
    UNREACHABLE();
    return NULL;
  }
}
1851 
1852 
// Lower a load from a global property cell; needs an environment when the
// hole value must be checked (and deopted on).
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
  return instr->RequiresHoleCheck()
      ? AssignEnvironment(DefineAsRegister(result))
      : DefineAsRegister(result);
}
1859 
1860 
1861 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1862  LOperand* context = UseFixed(instr->context(), esi);
1863  LOperand* global_object = UseFixed(instr->global_object(), edx);
1864  LLoadGlobalGeneric* result =
1865  new(zone()) LLoadGlobalGeneric(context, global_object);
1866  return MarkAsCall(DefineFixed(result, eax), instr);
1867 }
1868 
1869 
1870 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1871  LStoreGlobalCell* result =
1872  new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
1873  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1874 }
1875 
1876 
1877 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1878  LOperand* context = UseFixed(instr->context(), esi);
1879  LOperand* global_object = UseFixed(instr->global_object(), edx);
1880  LOperand* value = UseFixed(instr->value(), eax);
1881  LStoreGlobalGeneric* result =
1882  new(zone()) LStoreGlobalGeneric(context, global_object, value);
1883  return MarkAsCall(result, instr);
1884 }
1885 
1886 
1887 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1888  LOperand* context = UseRegisterAtStart(instr->value());
1889  LInstruction* result =
1890  DefineAsRegister(new(zone()) LLoadContextSlot(context));
1891  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1892 }
1893 
1894 
// Store a value into a context slot.  When a write barrier is needed
// the value is placed in a dedicated temp register (so the barrier code
// can clobber it) and an extra scratch register is reserved; otherwise
// a plain register use suffices.
LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* value;
  LOperand* temp;
  LOperand* context = UseRegister(instr->context());
  if (instr->NeedsWriteBarrier()) {
    value = UseTempRegister(instr->value());
    temp = TempRegister();
  } else {
    value = UseRegister(instr->value());
    temp = NULL;
  }
  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
  // A hole check on the slot can deoptimize, hence the environment.
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
1909 
1910 
1911 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1912  ASSERT(instr->representation().IsTagged());
1913  LOperand* obj = UseRegisterAtStart(instr->object());
1914  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
1915 }
1916 
1917 
// Polymorphic named-field load.  If a generic (IC) fallback may be
// required the instruction is lowered as a call with fixed registers;
// otherwise it is a register-only operation that deoptimizes on an
// unhandled map, hence the environment.
LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
    HLoadNamedFieldPolymorphic* instr) {
  ASSERT(instr->representation().IsTagged());
  if (instr->need_generic()) {
    LOperand* context = UseFixed(instr->context(), esi);
    LOperand* obj = UseFixed(instr->object(), edx);
    LLoadNamedFieldPolymorphic* result =
        new(zone()) LLoadNamedFieldPolymorphic(context, obj);
    return MarkAsCall(DefineFixed(result, eax), instr);
  } else {
    LOperand* context = UseAny(instr->context());  // Not actually used.
    LOperand* obj = UseRegisterAtStart(instr->object());
    LLoadNamedFieldPolymorphic* result =
        new(zone()) LLoadNamedFieldPolymorphic(context, obj);
    return AssignEnvironment(DefineAsRegister(result));
  }
}
1935 
1936 
1937 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1938  LOperand* context = UseFixed(instr->context(), esi);
1939  LOperand* object = UseFixed(instr->object(), edx);
1940  LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
1941  return MarkAsCall(DefineFixed(result, eax), instr);
1942 }
1943 
1944 
1945 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1946  HLoadFunctionPrototype* instr) {
1947  return AssignEnvironment(DefineAsRegister(
1948  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()),
1949  TempRegister())));
1950 }
1951 
1952 
1953 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1954  LOperand* input = UseRegisterAtStart(instr->value());
1955  return DefineAsRegister(new(zone()) LLoadElements(input));
1956 }
1957 
1958 
1959 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1960  HLoadExternalArrayPointer* instr) {
1961  LOperand* input = UseRegisterAtStart(instr->value());
1962  return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
1963 }
1964 
1965 
// Keyed load from a fast-elements backing store.
LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
    HLoadKeyedFastElement* instr) {
  ASSERT(instr->representation().IsTagged());
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* obj = UseRegisterAtStart(instr->object());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
  // AssignEnvironment mutates |result| in place; discarding its return
  // value here is intentional.
  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
  return DefineAsRegister(result);
}
1976 
1977 
1978 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1979  HLoadKeyedFastDoubleElement* instr) {
1980  ASSERT(instr->representation().IsDouble());
1981  ASSERT(instr->key()->representation().IsInteger32());
1982  LOperand* elements = UseRegisterAtStart(instr->elements());
1983  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1984  LLoadKeyedFastDoubleElement* result =
1985  new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1986  return AssignEnvironment(DefineAsRegister(result));
1987 }
1988 
1989 
// Keyed load from an external (typed) array.  The result representation
// must match the elements kind: float/double kinds produce doubles, all
// remaining kinds produce int32 values.
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
    HLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  ASSERT(
      (instr->representation().IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (instr->representation().IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LLoadKeyedSpecializedArrayElement* result =
      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
  LInstruction* load_instr = DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure it
  // has an environment.
  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS)
      ? AssignEnvironment(load_instr)
      : load_instr;
}
2012 
2013 
2014 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2015  LOperand* context = UseFixed(instr->context(), esi);
2016  LOperand* object = UseFixed(instr->object(), edx);
2017  LOperand* key = UseFixed(instr->key(), ecx);
2018 
2019  LLoadKeyedGeneric* result =
2020  new(zone()) LLoadKeyedGeneric(context, object, key);
2021  return MarkAsCall(DefineFixed(result, eax), instr);
2022 }
2023 
2024 
2025 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
2026  HStoreKeyedFastElement* instr) {
2027  bool needs_write_barrier = instr->NeedsWriteBarrier();
2028  ASSERT(instr->value()->representation().IsTagged());
2029  ASSERT(instr->object()->representation().IsTagged());
2030  ASSERT(instr->key()->representation().IsInteger32());
2031 
2032  LOperand* obj = UseRegister(instr->object());
2033  LOperand* val = needs_write_barrier
2034  ? UseTempRegister(instr->value())
2035  : UseRegisterAtStart(instr->value());
2036  LOperand* key = needs_write_barrier
2037  ? UseTempRegister(instr->key())
2038  : UseRegisterOrConstantAtStart(instr->key());
2039  return new(zone()) LStoreKeyedFastElement(obj, key, val);
2040 }
2041 
2042 
2043 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
2044  HStoreKeyedFastDoubleElement* instr) {
2045  ASSERT(instr->value()->representation().IsDouble());
2046  ASSERT(instr->elements()->representation().IsTagged());
2047  ASSERT(instr->key()->representation().IsInteger32());
2048 
2049  LOperand* elements = UseRegisterAtStart(instr->elements());
2050  LOperand* val = UseTempRegister(instr->value());
2051  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2052 
2053  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
2054 }
2055 
2056 
// Keyed store into an external (typed) array.  The value representation
// must agree with the elements kind (double for float/double kinds,
// int32 otherwise).
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
    HStoreKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  ASSERT(
      (instr->value()->representation().IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (instr->value()->representation().IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->external_pointer()->representation().IsExternal());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LOperand* val = NULL;
  if (elements_kind == EXTERNAL_BYTE_ELEMENTS ||
      elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
      elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
    // We need a byte register in this case for the value.
    // (On IA-32 only eax/ebx/ecx/edx are byte-addressable; eax is
    // reserved here via a fixed use.)
    val = UseFixed(instr->value(), eax);
  } else {
    val = UseRegister(instr->value());
  }

  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
                                                        key,
                                                        val);
}
2086 
2087 
2088 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2089  LOperand* context = UseFixed(instr->context(), esi);
2090  LOperand* object = UseFixed(instr->object(), edx);
2091  LOperand* key = UseFixed(instr->key(), ecx);
2092  LOperand* value = UseFixed(instr->value(), eax);
2093 
2094  ASSERT(instr->object()->representation().IsTagged());
2095  ASSERT(instr->key()->representation().IsTagged());
2096  ASSERT(instr->value()->representation().IsTagged());
2097 
2098  LStoreKeyedGeneric* result =
2099  new(zone()) LStoreKeyedGeneric(context, object, key, value);
2100  return MarkAsCall(result, instr);
2101 }
2102 
2103 
// Elements-kind transition.  Transitions classified as simple by
// IsSimpleMapChangeTransition only rewrite the map word, so they need
// just the object plus two scratch registers and define the result
// same-as-first.  All other transitions go through a call with fixed
// registers.
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  ElementsKind from_kind = instr->original_map()->elements_kind();
  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
    LOperand* object = UseRegister(instr->object());
    LOperand* new_map_reg = TempRegister();
    LOperand* temp_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, new_map_reg, temp_reg);
    return DefineSameAsFirst(result);
  } else {
    // Call path: pin the object and temps to fixed registers; note that
    // fixed_object_reg is passed as the third (temp) constructor slot.
    LOperand* object = UseFixed(instr->object(), eax);
    LOperand* fixed_object_reg = FixedTemp(edx);
    LOperand* new_map_reg = FixedTemp(ebx);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object,
                                            new_map_reg,
                                            fixed_object_reg);
    return MarkAsCall(DefineFixed(result, eax), instr);
  }
}
2126 
2127 
// Store to a named field, either in-object or in the out-of-line
// properties array.  The operand policies depend on whether write
// barriers are needed for the value and/or for a map transition.
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
      instr->NeedsWriteBarrierForMap();

  LOperand* obj;
  if (needs_write_barrier) {
    // NOTE(review): in-object stores keep the object in a plain
    // register while properties-array stores use a temp — presumably
    // because the latter lets the barrier clobber it; confirm against
    // the codegen.
    obj = instr->is_in_object()
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());

  // We only need a scratch register if we have a write barrier or we
  // have a store into the properties array (not in-object-property).
  LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
      needs_write_barrier_for_map) ? TempRegister() : NULL;

  // We need a temporary register for write barrier of the map field.
  LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;

  return new(zone()) LStoreNamedField(obj, val, temp, temp_map);
}
2158 
2159 
2160 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2161  LOperand* context = UseFixed(instr->context(), esi);
2162  LOperand* object = UseFixed(instr->object(), edx);
2163  LOperand* value = UseFixed(instr->value(), eax);
2164 
2165  LStoreNamedGeneric* result =
2166  new(zone()) LStoreNamedGeneric(context, object, value);
2167  return MarkAsCall(result, instr);
2168 }
2169 
2170 
2171 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2172  LOperand* context = UseFixed(instr->context(), esi);
2173  LOperand* left = UseOrConstantAtStart(instr->left());
2174  LOperand* right = UseOrConstantAtStart(instr->right());
2175  LStringAdd* string_add = new(zone()) LStringAdd(context, left, right);
2176  return MarkAsCall(DefineFixed(string_add, eax), instr);
2177 }
2178 
2179 
2180 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2181  LOperand* string = UseTempRegister(instr->string());
2182  LOperand* index = UseTempRegister(instr->index());
2183  LOperand* context = UseAny(instr->context());
2184  LStringCharCodeAt* result =
2185  new(zone()) LStringCharCodeAt(context, string, index);
2186  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2187 }
2188 
2189 
2190 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2191  LOperand* char_code = UseRegister(instr->value());
2192  LOperand* context = UseAny(instr->context());
2193  LStringCharFromCode* result =
2194  new(zone()) LStringCharFromCode(context, char_code);
2195  return AssignPointerMap(DefineAsRegister(result));
2196 }
2197 
2198 
2199 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2200  LOperand* string = UseRegisterAtStart(instr->value());
2201  return DefineAsRegister(new(zone()) LStringLength(string));
2202 }
2203 
2204 
2205 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2206  LOperand* context = UseFixed(instr->context(), esi);
2207  LOperand* temp = TempRegister();
2208  LAllocateObject* result = new(zone()) LAllocateObject(context, temp);
2209  return AssignPointerMap(DefineAsRegister(result));
2210 }
2211 
2212 
2213 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2214  LOperand* context = UseFixed(instr->context(), esi);
2215  return MarkAsCall(
2216  DefineFixed(new(zone()) LFastLiteral(context), eax), instr);
2217 }
2218 
2219 
2220 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2221  LOperand* context = UseFixed(instr->context(), esi);
2222  return MarkAsCall(
2223  DefineFixed(new(zone()) LArrayLiteral(context), eax), instr);
2224 }
2225 
2226 
2227 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2228  LOperand* context = UseFixed(instr->context(), esi);
2229  return MarkAsCall(
2230  DefineFixed(new(zone()) LObjectLiteral(context), eax), instr);
2231 }
2232 
2233 
2234 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2235  LOperand* context = UseFixed(instr->context(), esi);
2236  return MarkAsCall(
2237  DefineFixed(new(zone()) LRegExpLiteral(context), eax), instr);
2238 }
2239 
2240 
2241 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2242  LOperand* context = UseFixed(instr->context(), esi);
2243  return MarkAsCall(
2244  DefineFixed(new(zone()) LFunctionLiteral(context), eax), instr);
2245 }
2246 
2247 
2248 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2249  LOperand* context = UseFixed(instr->context(), esi);
2250  LOperand* object = UseAtStart(instr->object());
2251  LOperand* key = UseOrConstantAtStart(instr->key());
2252  LDeleteProperty* result = new(zone()) LDeleteProperty(context, object, key);
2253  return MarkAsCall(DefineFixed(result, eax), instr);
2254 }
2255 
2256 
// On-stack-replacement entry point.  Tells the register allocator the
// chunk is entered via OSR and tags the current environment with the
// entry's ast id before capturing it on the LOsrEntry.
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
2262 
2263 
2264 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2265  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2266  return DefineAsSpilled(new(zone()) LParameter, spill_index);
2267 }
2268 
2269 
// OSR values are assigned sequential spill slots.  If the slot index
// exceeds what LUnallocated can encode, compilation is aborted; the
// index is then reset to a harmless dummy so construction can finish.
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
  if (spill_index > LUnallocated::kMaxFixedIndex) {
    Abort("Too many spill slots needed for OSR");
    spill_index = 0;
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
2278 
2279 
2280 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2281  LOperand* context = UseFixed(instr->context(), esi);
2282  argument_count_ -= instr->argument_count();
2283  LCallStub* result = new(zone()) LCallStub(context);
2284  return MarkAsCall(DefineFixed(result, eax), instr);
2285 }
2286 
2287 
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used and needs no lithium instruction.
  return NULL;
}
2295 
2296 
2297 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2298  LOperand* arguments = UseRegister(instr->arguments());
2299  LOperand* length = UseTempRegister(instr->length());
2300  LOperand* index = Use(instr->index());
2301  LAccessArgumentsAt* result =
2302  new(zone()) LAccessArgumentsAt(arguments, length, index);
2303  return AssignEnvironment(DefineAsRegister(result));
2304 }
2305 
2306 
2307 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2308  LOperand* object = UseFixed(instr->value(), eax);
2309  LToFastProperties* result = new(zone()) LToFastProperties(object);
2310  return MarkAsCall(DefineFixed(result, eax), instr);
2311 }
2312 
2313 
2314 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2315  LOperand* context = UseFixed(instr->context(), esi);
2316  LOperand* value = UseAtStart(instr->value());
2317  LTypeof* result = new(zone()) LTypeof(context, value);
2318  return MarkAsCall(DefineFixed(result, eax), instr);
2319 }
2320 
2321 
2322 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2323  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2324 }
2325 
2326 
2327 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2328  HIsConstructCallAndBranch* instr) {
2329  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2330 }
2331 
2332 
// HSimulate produces no code of its own; it updates the environment the
// builder tracks for deoptimization (ast id, pops, pushes and binds).
// If a preceding instruction requested a lazy deoptimization
// environment, an LLazyBailout is emitted here to capture it.
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  HEnvironment* env = current_block_->last_environment();
  ASSERT(env != NULL);

  env->set_ast_id(instr->ast_id());

  // Apply the simulate's stack effects to the tracked environment.
  env->Drop(instr->pop_count());
  for (int i = 0; i < instr->values()->length(); ++i) {
    HValue* value = instr->values()->at(i);
    if (instr->HasAssignedIndexAt(i)) {
      env->Bind(instr->GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }

  // If there is an instruction pending deoptimization environment create a
  // lazy bailout instruction to capture the environment.
  if (pending_deoptimization_ast_id_ != AstNode::kNoNumber) {
    ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
    LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
    LInstruction* result = AssignEnvironment(lazy_bailout);
    // Store the lazy deopt environment with the instruction if needed. Right
    // now it is only used for LInstanceOfKnownGlobal.
    instruction_pending_deoptimization_environment_->
        SetDeferredLazyDeoptimizationEnvironment(result->environment());
    // Clear the pending state so the next simulate starts fresh.
    instruction_pending_deoptimization_environment_ = NULL;
    pending_deoptimization_ast_id_ = AstNode::kNoNumber;
    return result;
  }

  return NULL;
}
2366 
2367 
// Stack-overflow/interrupt check.  At function entry it is lowered as a
// call; at loop back edges it carries a pointer map and an environment
// instead, so it can lazily bail out.
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
  if (instr->is_function_entry()) {
    LOperand* context = UseFixed(instr->context(), esi);
    return MarkAsCall(new(zone()) LStackCheck(context), instr);
  } else {
    ASSERT(instr->is_backwards_branch());
    LOperand* context = UseAny(instr->context());
    return AssignEnvironment(
        AssignPointerMap(new(zone()) LStackCheck(context)));
  }
}
2379 
2380 
// Enter an inlined function: derive the inner environment from the
// outer one, bind the arguments object when the inlinee refers to it,
// and record the inlined closure on the chunk.  Emits no lithium code.
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->call_kind(),
                                               instr->is_construct());
  if (instr->arguments_var() != NULL) {
    inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
  }
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
2397 
2398 
// Leave an inlined function: if arguments were materialized on the
// stack, emit an LDrop to pop them (and adjust the builder's pending
// argument count), then restore the outer environment.  Returns the
// LDrop, or NULL when nothing was pushed.
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (instr->arguments_pushed()) {
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    argument_count_ -= argument_count;
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);
  return pop;
}
2415 
2416 
2417 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2418  LOperand* context = UseFixed(instr->context(), esi);
2419  LOperand* key = UseOrConstantAtStart(instr->key());
2420  LOperand* object = UseOrConstantAtStart(instr->object());
2421  LIn* result = new(zone()) LIn(context, key, object);
2422  return MarkAsCall(DefineFixed(result, eax), instr);
2423 }
2424 
2425 
2426 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2427  LOperand* context = UseFixed(instr->context(), esi);
2428  LOperand* object = UseFixed(instr->enumerable(), eax);
2429  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2430  return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
2431 }
2432 
2433 
2434 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2435  LOperand* map = UseRegister(instr->map());
2436  return AssignEnvironment(DefineAsRegister(
2437  new(zone()) LForInCacheArray(map)));
2438 }
2439 
2440 
2441 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2442  LOperand* value = UseRegisterAtStart(instr->value());
2443  LOperand* map = UseRegisterAtStart(instr->map());
2444  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2445 }
2446 
2447 
2448 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2449  LOperand* object = UseRegister(instr->object());
2450  LOperand* index = UseTempRegister(instr->index());
2451  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2452 }
2453 
2454 
2455 } } // namespace v8::internal
2456 
2457 #endif // V8_TARGET_ARCH_IA32
HValue * LookupValue(int id) const
Definition: hydrogen.h:310
#define DEFINE_COMPILE(type)
Definition: lithium-arm.cc:37
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:196
static LGap * cast(LInstruction *instr)
Definition: lithium-arm.h:318
static LConstantOperand * Create(int index, Zone *zone)
Definition: lithium.h:263
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:305
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:219
Handle< Object > name() const
Definition: lithium-arm.h:1726
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
int GetParameterStackSlot(int index) const
Definition: lithium-arm.cc:497
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:120
Token::Value op() const
Definition: lithium-arm.h:1117
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:84
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
Definition: lithium-arm.h:336
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:279
int ParameterAt(int index)
Definition: lithium-arm.cc:508
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:226
LLabel(HBasicBlock *block)
Definition: lithium-arm.h:400
Handle< String > name() const
Definition: lithium-arm.h:1542
static const int kNoNumber
Definition: ast.h:197
static const int kNumAllocatableRegisters
static bool IsSupported(CpuFeature f)
Handle< Object > name() const
Definition: lithium-arm.h:1705
LEnvironment * environment() const
Definition: lithium-arm.h:240
Token::Value op() const
Definition: lithium-arm.h:610
#define ASSERT(condition)
Definition: checks.h:270
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:111
void PrintTo(StringStream *stream)
Definition: lithium.cc:203
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:49
Representation representation() const
EqualityKind kind() const
Definition: lithium-arm.h:668
LGap * GetGapAt(int index) const
Definition: lithium-arm.cc:515
const Register edi
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:368
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:293
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:299
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:50
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:324
int GetNextSpillIndex(bool is_double)
Definition: lithium-arm.cc:420
void PrintTo(StringStream *stream)
Definition: lithium.cc:158
Zone * zone() const
Definition: hydrogen.h:250
const Register eax
LLabel * replacement() const
Definition: lithium-arm.h:410
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:156
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:201
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:330
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:54
const XMMRegister xmm1
LOperand * GetNextSpillSlot(bool is_double)
Definition: lithium-arm.cc:427
void AddMove(LOperand *from, LOperand *to, Zone *zone)
Definition: lithium.h:401
static const char * String(Value tok)
Definition: token.h:275
const int kPointerSize
Definition: globals.h:234
static LDoubleStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:324
const Register ecx
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:336
bool HasEnvironment() const
Definition: lithium-arm.h:241
static void VPrint(const char *format, va_list args)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:341
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:233
static int ToAllocationIndex(Register reg)
Definition: assembler-arm.h:77
Zone * zone() const
Definition: lithium-arm.h:2275
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:395
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:92
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:125
static LStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:299
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:359
static const int kMaxFixedIndex
Definition: lithium.h:157
const XMMRegister xmm3
bool IsGapAt(int index) const
Definition: lithium-arm.cc:520
LOsrEntry()
Definition: lithium-arm.cc:44
LPointerMap * pointer_map() const
Definition: lithium-arm.h:244
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:252
static int ToAllocationIndex(XMMRegister reg)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:312
LLabel * GetLabel(int block_id) const
Definition: lithium-arm.h:2249
virtual DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,"string-compare-and-branch") Token void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:247
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
Definition: lithium-arm.cc:473
HGraph * graph() const
Definition: lithium-arm.h:2241
const Register ebx
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:145
int block_id() const
Definition: lithium-arm.h:369
void PrintDataTo(StringStream *stream) const
Definition: lithium.cc:137
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:170
CompilationInfo * info() const
Definition: lithium-arm.h:2240
#define UNIMPLEMENTED()
Definition: checks.h:48
static const int kNumAllocatableRegisters
Definition: assembler-arm.h:74
Token::Value op() const
Definition: lithium-arm.h:1140
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
const Register esi
void AddGapMove(int index, LOperand *from, LOperand *to)
Definition: lithium-arm.cc:531
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:210
void USE(T)
Definition: globals.h:303
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:190
Handle< String > name() const
Definition: lithium-arm.h:1516
LConstantOperand * DefineConstantOperand(HConstant *constant)
Definition: lithium-arm.cc:492
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:195
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Definition: lithium-arm.cc:542
bool HasPointerMap() const
Definition: lithium-arm.h:245
int NearestGapPos(int index) const
Definition: lithium-arm.cc:525
bool IsRedundant() const
Definition: lithium-arm.cc:134
const XMMRegister xmm2
const Register edx
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:319
virtual int InputCount()=0
static HValue * cast(HValue *value)
Handle< String > type_literal()
Definition: lithium-arm.h:2088
FlagType type() const
Definition: flags.cc:1358
void PrintTo(StringStream *stream)
Definition: lithium.cc:35
Handle< Object > LookupLiteral(LConstantOperand *operand) const
Definition: lithium-arm.cc:537
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:288
const ZoneList< LInstruction * > * instructions() const
Definition: lithium-arm.h:2242
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:348
virtual void PrintDataTo(StringStream *stream)