v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
lithium-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "lithium-allocator-inl.h"
31 #include "arm/lithium-arm.h"
32 #include "arm/lithium-codegen-arm.h"
33 
34 namespace v8 {
35 namespace internal {
36 
37 #define DEFINE_COMPILE(type) \
38  void L##type::CompileToNative(LCodeGen* generator) { \
39  generator->Do##type(this); \
40  }
41 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
42 #undef DEFINE_COMPILE
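// Editor's note: a sketch of what the DEFINE_COMPILE expansion above produces.
// For one concrete instruction from LITHIUM_CONCRETE_INSTRUCTION_LIST, say
// LGoto, the generated method is roughly:
//
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }
//
// i.e. every concrete Lithium instruction simply forwards code generation to
// the matching Do<Type> handler on LCodeGen.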
43 
44 LOsrEntry::LOsrEntry() {
45  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
46  register_spills_[i] = NULL;
47  }
48  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
49  double_register_spills_[i] = NULL;
50  }
51 }
52 
53 
54 void LOsrEntry::MarkSpilledRegister(int allocation_index,
55  LOperand* spill_operand) {
56  ASSERT(spill_operand->IsStackSlot());
57  ASSERT(register_spills_[allocation_index] == NULL);
58  register_spills_[allocation_index] = spill_operand;
59 }
60 
61 
62 #ifdef DEBUG
63 void LInstruction::VerifyCall() {
64  // Call instructions can use only fixed registers as temporaries and
65  // outputs because all registers are blocked by the calling convention.
66  // Input operands must use a fixed register or use-at-start policy or
67  // a non-register policy.
68  ASSERT(Output() == NULL ||
69  LUnallocated::cast(Output())->HasFixedPolicy() ||
70  !LUnallocated::cast(Output())->HasRegisterPolicy());
71  for (UseIterator it(this); !it.Done(); it.Advance()) {
72  LUnallocated* operand = LUnallocated::cast(it.Current());
73  ASSERT(operand->HasFixedPolicy() ||
74  operand->IsUsedAtStart());
75  }
76  for (TempIterator it(this); !it.Done(); it.Advance()) {
77  LUnallocated* operand = LUnallocated::cast(it.Current());
78  ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
79  }
80 }
81 #endif
82 
83 
84 void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
85  LOperand* spill_operand) {
86  ASSERT(spill_operand->IsDoubleStackSlot());
87  ASSERT(double_register_spills_[allocation_index] == NULL);
88  double_register_spills_[allocation_index] = spill_operand;
89 }
90 
91 
92 void LInstruction::PrintTo(StringStream* stream) {
93  stream->Add("%s ", this->Mnemonic());
94 
95  PrintOutputOperandTo(stream);
96 
97  PrintDataTo(stream);
98 
99  if (HasEnvironment()) {
100  stream->Add(" ");
101  environment()->PrintTo(stream);
102  }
103 
104  if (HasPointerMap()) {
105  stream->Add(" ");
106  pointer_map()->PrintTo(stream);
107  }
108 }
109 
110 
111 void LInstruction::PrintDataTo(StringStream* stream) {
112  stream->Add("= ");
113  for (int i = 0; i < InputCount(); i++) {
114  if (i > 0) stream->Add(" ");
115  InputAt(i)->PrintTo(stream);
116  }
117 }
118 
119 
120 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
121  if (HasResult()) result()->PrintTo(stream);
122 }
123 
124 
125 void LLabel::PrintDataTo(StringStream* stream) {
126  LGap::PrintDataTo(stream);
127  LLabel* rep = replacement();
128  if (rep != NULL) {
129  stream->Add(" Dead block replaced with B%d", rep->block_id());
130  }
131 }
132 
133 
134 bool LGap::IsRedundant() const {
135  for (int i = 0; i < 4; i++) {
136  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
137  return false;
138  }
139  }
140 
141  return true;
142 }
143 
144 
145 void LGap::PrintDataTo(StringStream* stream) {
146  for (int i = 0; i < 4; i++) {
147  stream->Add("(");
148  if (parallel_moves_[i] != NULL) {
149  parallel_moves_[i]->PrintDataTo(stream);
150  }
151  stream->Add(") ");
152  }
153 }
154 
155 
156 const char* LArithmeticD::Mnemonic() const {
157  switch (op()) {
158  case Token::ADD: return "add-d";
159  case Token::SUB: return "sub-d";
160  case Token::MUL: return "mul-d";
161  case Token::DIV: return "div-d";
162  case Token::MOD: return "mod-d";
163  default:
164  UNREACHABLE();
165  return NULL;
166  }
167 }
168 
169 
170 const char* LArithmeticT::Mnemonic() const {
171  switch (op()) {
172  case Token::ADD: return "add-t";
173  case Token::SUB: return "sub-t";
174  case Token::MUL: return "mul-t";
175  case Token::MOD: return "mod-t";
176  case Token::DIV: return "div-t";
177  case Token::BIT_AND: return "bit-and-t";
178  case Token::BIT_OR: return "bit-or-t";
179  case Token::BIT_XOR: return "bit-xor-t";
180  case Token::SHL: return "shl-t";
181  case Token::SAR: return "sar-t";
182  case Token::SHR: return "shr-t";
183  default:
184  UNREACHABLE();
185  return NULL;
186  }
187 }
188 
189 
190 void LGoto::PrintDataTo(StringStream* stream) {
191  stream->Add("B%d", block_id());
192 }
193 
194 
195 void LBranch::PrintDataTo(StringStream* stream) {
196  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
197  InputAt(0)->PrintTo(stream);
198 }
199 
200 
201 void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
202  stream->Add("if ");
203  InputAt(0)->PrintTo(stream);
204  stream->Add(" %s ", Token::String(op()));
205  InputAt(1)->PrintTo(stream);
206  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
207 }
208 
209 
210 void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
211  stream->Add("if ");
212  InputAt(0)->PrintTo(stream);
213  stream->Add(kind() == kStrictEquality ? " === " : " == ");
214  stream->Add(nil() == kNullValue ? "null" : "undefined");
215  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
216 }
217 
218 
219 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
220  stream->Add("if is_object(");
221  InputAt(0)->PrintTo(stream);
222  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
223 }
224 
225 
226 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
227  stream->Add("if is_string(");
228  InputAt(0)->PrintTo(stream);
229  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
230 }
231 
232 
233 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
234  stream->Add("if is_smi(");
235  InputAt(0)->PrintTo(stream);
236  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
237 }
238 
239 
240 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
241  stream->Add("if is_undetectable(");
242  InputAt(0)->PrintTo(stream);
243  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
244 }
245 
246 
247 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
248  stream->Add("if string_compare(");
249  InputAt(0)->PrintTo(stream);
250  InputAt(1)->PrintTo(stream);
251  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
252 }
253 
254 
255 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
256  stream->Add("if has_instance_type(");
257  InputAt(0)->PrintTo(stream);
258  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
259 }
260 
261 
262 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
263  stream->Add("if has_cached_array_index(");
264  InputAt(0)->PrintTo(stream);
265  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
266 }
267 
268 
269 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
270  stream->Add("if class_of_test(");
271  InputAt(0)->PrintTo(stream);
272  stream->Add(", \"%o\") then B%d else B%d",
273  *hydrogen()->class_name(),
274  true_block_id(),
275  false_block_id());
276 }
277 
278 
279 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
280  stream->Add("if typeof ");
281  InputAt(0)->PrintTo(stream);
282  stream->Add(" == \"%s\" then B%d else B%d",
283  *hydrogen()->type_literal()->ToCString(),
284  true_block_id(), false_block_id());
285 }
286 
287 
288 void LCallConstantFunction::PrintDataTo(StringStream* stream) {
289  stream->Add("#%d / ", arity());
290 }
291 
292 
293 void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
294  stream->Add("/%s ", hydrogen()->OpName());
295  InputAt(0)->PrintTo(stream);
296 }
297 
298 
299 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
300  InputAt(0)->PrintTo(stream);
301  stream->Add("[%d]", slot_index());
302 }
303 
304 
305 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
306  InputAt(0)->PrintTo(stream);
307  stream->Add("[%d] <- ", slot_index());
308  InputAt(1)->PrintTo(stream);
309 }
310 
311 
312 void LInvokeFunction::PrintDataTo(StringStream* stream) {
313  stream->Add("= ");
314  InputAt(0)->PrintTo(stream);
315  stream->Add(" #%d / ", arity());
316 }
317 
318 
319 void LCallKeyed::PrintDataTo(StringStream* stream) {
320  stream->Add("[r2] #%d / ", arity());
321 }
322 
323 
324 void LCallNamed::PrintDataTo(StringStream* stream) {
325  SmartArrayPointer<char> name_string = name()->ToCString();
326  stream->Add("%s #%d / ", *name_string, arity());
327 }
328 
329 
330 void LCallGlobal::PrintDataTo(StringStream* stream) {
331  SmartArrayPointer<char> name_string = name()->ToCString();
332  stream->Add("%s #%d / ", *name_string, arity());
333 }
334 
335 
336 void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
337  stream->Add("#%d / ", arity());
338 }
339 
340 
341 void LCallNew::PrintDataTo(StringStream* stream) {
342  stream->Add("= ");
343  InputAt(0)->PrintTo(stream);
344  stream->Add(" #%d / ", arity());
345 }
346 
347 
348 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
349  arguments()->PrintTo(stream);
350 
351  stream->Add(" length ");
352  length()->PrintTo(stream);
353 
354  stream->Add(" index ");
355  index()->PrintTo(stream);
356 }
357 
358 
359 void LStoreNamedField::PrintDataTo(StringStream* stream) {
360  object()->PrintTo(stream);
361  stream->Add(".");
362  stream->Add(*String::cast(*name())->ToCString());
363  stream->Add(" <- ");
364  value()->PrintTo(stream);
365 }
366 
367 
368 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
369  object()->PrintTo(stream);
370  stream->Add(".");
371  stream->Add(*String::cast(*name())->ToCString());
372  stream->Add(" <- ");
373  value()->PrintTo(stream);
374 }
375 
376 
377 void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
378  object()->PrintTo(stream);
379  stream->Add("[");
380  key()->PrintTo(stream);
381  stream->Add("] <- ");
382  value()->PrintTo(stream);
383 }
384 
385 
386 void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
387  elements()->PrintTo(stream);
388  stream->Add("[");
389  key()->PrintTo(stream);
390  stream->Add("] <- ");
391  value()->PrintTo(stream);
392 }
393 
394 
395 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
396  object()->PrintTo(stream);
397  stream->Add("[");
398  key()->PrintTo(stream);
399  stream->Add("] <- ");
400  value()->PrintTo(stream);
401 }
402 
403 
405  object()->PrintTo(stream);
406  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
407 }
408 
409 
410 LChunk::LChunk(CompilationInfo* info, HGraph* graph)
411  : spill_slot_count_(0),
412  info_(info),
413  graph_(graph),
414  instructions_(32, graph->zone()),
415  pointer_maps_(8, graph->zone()),
416  inlined_closures_(1, graph->zone()) {
417 }
418 
419 
420 int LChunk::GetNextSpillIndex(bool is_double) {
421  // Skip a slot if this is for a double-width slot.
422  if (is_double) spill_slot_count_++;
423  return spill_slot_count_++;
424 }
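// Editor's note: a short trace of the counter logic above (a hypothetical
// call sequence, not from the source). Starting with spill_slot_count_ == 0:
//
//   GetNextSpillIndex(false);  // returns 0, count becomes 1
//   GetNextSpillIndex(true);   // bumps count to 2, returns 2, count becomes 3
//
// The extra increment for a double leaves index 1 unused, so a double-width
// value effectively reserves two consecutive slots.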
425 
426 
427 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
428  int index = GetNextSpillIndex(is_double);
429  if (is_double) {
430  return LDoubleStackSlot::Create(index, zone());
431  } else {
432  return LStackSlot::Create(index, zone());
433  }
434 }
435 
436 
437 void LChunk::MarkEmptyBlocks() {
438  HPhase phase("L_Mark empty blocks", this);
439  for (int i = 0; i < graph()->blocks()->length(); ++i) {
440  HBasicBlock* block = graph()->blocks()->at(i);
441  int first = block->first_instruction_index();
442  int last = block->last_instruction_index();
443  LInstruction* first_instr = instructions()->at(first);
444  LInstruction* last_instr = instructions()->at(last);
445 
446  LLabel* label = LLabel::cast(first_instr);
447  if (last_instr->IsGoto()) {
448  LGoto* goto_instr = LGoto::cast(last_instr);
449  if (label->IsRedundant() &&
450  !label->is_loop_header()) {
451  bool can_eliminate = true;
452  for (int i = first + 1; i < last && can_eliminate; ++i) {
453  LInstruction* cur = instructions()->at(i);
454  if (cur->IsGap()) {
455  LGap* gap = LGap::cast(cur);
456  if (!gap->IsRedundant()) {
457  can_eliminate = false;
458  }
459  } else {
460  can_eliminate = false;
461  }
462  }
463 
464  if (can_eliminate) {
465  label->set_replacement(GetLabel(goto_instr->block_id()));
466  }
467  }
468  }
469  }
470 }
471 
472 
473 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
474  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
475  int index = -1;
476  if (instr->IsControl()) {
477  instructions_.Add(gap, zone());
478  index = instructions_.length();
479  instructions_.Add(instr, zone());
480  } else {
481  index = instructions_.length();
482  instructions_.Add(instr, zone());
483  instructions_.Add(gap, zone());
484  }
485  if (instr->HasPointerMap()) {
486  pointer_maps_.Add(instr->pointer_map(), zone());
487  instr->pointer_map()->set_lithium_position(index);
488  }
489 }
490 
491 
492 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
493  return LConstantOperand::Create(constant->id(), zone());
494 }
495 
496 
497 int LChunk::GetParameterStackSlot(int index) const {
498  // The receiver is at index 0, the first parameter at index 1, so we
499  // shift all parameter indexes down by the number of parameters, and
500  // make sure they end up negative so they are distinguishable from
501  // spill slots.
502  int result = index - info()->scope()->num_parameters() - 1;
503  ASSERT(result < 0);
504  return result;
505 }
506 
507 // A parameter relative to ebp in the arguments stub.
508 int LChunk::ParameterAt(int index) {
509  ASSERT(-1 <= index); // -1 is the receiver.
510  return (1 + info()->scope()->num_parameters() - index) *
511  kPointerSize;
512 }
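// Editor's note: a worked example of the two mappings above, assuming a
// hypothetical function with two declared parameters (num_parameters() == 2):
//
//   GetParameterStackSlot(1) == 1 - 2 - 1 == -2   // first parameter
//   GetParameterStackSlot(2) == 2 - 2 - 1 == -1   // last parameter
//   // results are always negative, so they never collide with spill slots
//
//   ParameterAt(-1) == (1 + 2 - (-1)) * kPointerSize == 4 * kPointerSize  // receiver
//   ParameterAt(0)  == (1 + 2 - 0) * kPointerSize   == 3 * kPointerSize   // first parameter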
513 
514 
515 LGap* LChunk::GetGapAt(int index) const {
516  return LGap::cast(instructions_[index]);
517 }
518 
519 
520 bool LChunk::IsGapAt(int index) const {
521  return instructions_[index]->IsGap();
522 }
523 
524 
525 int LChunk::NearestGapPos(int index) const {
526  while (!IsGapAt(index)) index--;
527  return index;
528 }
529 
530 
531 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
532  GetGapAt(index)->GetOrCreateParallelMove(
533  LGap::START, zone())->AddMove(from, to, zone());
534 }
535 
536 
537 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
538  return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
539 }
540 
541 
542 Representation LChunk::LookupLiteralRepresentation(
543  LConstantOperand* operand) const {
544  return graph_->LookupValue(operand->index())->representation();
545 }
546 
547 
548 LChunk* LChunkBuilder::Build() {
549  ASSERT(is_unused());
550  chunk_ = new(zone()) LChunk(info(), graph());
551  HPhase phase("L_Building chunk", chunk_);
552  status_ = BUILDING;
553  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
554  for (int i = 0; i < blocks->length(); i++) {
555  HBasicBlock* next = NULL;
556  if (i < blocks->length() - 1) next = blocks->at(i + 1);
557  DoBasicBlock(blocks->at(i), next);
558  if (is_aborted()) return NULL;
559  }
560  status_ = DONE;
561  return chunk_;
562 }
563 
564 
565 void LChunkBuilder::Abort(const char* format, ...) {
566  if (FLAG_trace_bailout) {
567  SmartArrayPointer<char> name(
568  info()->shared_info()->DebugName()->ToCString());
569  PrintF("Aborting LChunk building in @\"%s\": ", *name);
570  va_list arguments;
571  va_start(arguments, format);
572  OS::VPrint(format, arguments);
573  va_end(arguments);
574  PrintF("\n");
575  }
576  status_ = ABORTED;
577 }
578 
579 
580 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
581  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
582  Register::ToAllocationIndex(reg));
583 }
584 
585 
586 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
587  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
588  DoubleRegister::ToAllocationIndex(reg));
589 }
590 
591 
592 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
593  return Use(value, ToUnallocated(fixed_register));
594 }
595 
596 
597 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
598  return Use(value, ToUnallocated(reg));
599 }
600 
601 
602 LOperand* LChunkBuilder::UseRegister(HValue* value) {
603  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
604 }
605 
606 
607 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
608  return Use(value,
609  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
610  LUnallocated::USED_AT_START));
611 }
612 
613 
614 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
615  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
616 }
617 
618 
619 LOperand* LChunkBuilder::Use(HValue* value) {
620  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
621 }
622 
623 
624 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
625  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
626  LUnallocated::USED_AT_START));
627 }
628 
629 
630 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
631  return value->IsConstant()
632  ? chunk_->DefineConstantOperand(HConstant::cast(value))
633  : Use(value);
634 }
635 
636 
637 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
638  return value->IsConstant()
639  ? chunk_->DefineConstantOperand(HConstant::cast(value))
640  : UseAtStart(value);
641 }
642 
643 
644 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
645  return value->IsConstant()
646  ? chunk_->DefineConstantOperand(HConstant::cast(value))
647  : UseRegister(value);
648 }
649 
650 
651 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
652  return value->IsConstant()
653  ? chunk_->DefineConstantOperand(HConstant::cast(value))
654  : UseRegisterAtStart(value);
655 }
656 
657 
658 LOperand* LChunkBuilder::UseAny(HValue* value) {
659  return value->IsConstant()
660  ? chunk_->DefineConstantOperand(HConstant::cast(value))
661  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
662 }
663 
664 
665 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
666  if (value->EmitAtUses()) {
667  HInstruction* instr = HInstruction::cast(value);
668  VisitInstruction(instr);
669  }
670  operand->set_virtual_register(value->id());
671  return operand;
672 }
673 
674 
675 template<int I, int T>
676 LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
677  LUnallocated* result) {
678  result->set_virtual_register(current_instruction_->id());
679  instr->set_result(result);
680  return instr;
681 }
682 
683 
684 template<int I, int T>
685 LInstruction* LChunkBuilder::DefineAsRegister(
686  LTemplateInstruction<1, I, T>* instr) {
687  return Define(instr,
688  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
689 }
690 
691 
692 template<int I, int T>
693 LInstruction* LChunkBuilder::DefineAsSpilled(
694  LTemplateInstruction<1, I, T>* instr, int index) {
695  return Define(instr,
696  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
697 }
698 
699 
700 template<int I, int T>
701 LInstruction* LChunkBuilder::DefineSameAsFirst(
702  LTemplateInstruction<1, I, T>* instr) {
703  return Define(instr,
704  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
705 }
706 
707 
708 template<int I, int T>
709 LInstruction* LChunkBuilder::DefineFixed(
710  LTemplateInstruction<1, I, T>* instr, Register reg) {
711  return Define(instr, ToUnallocated(reg));
712 }
713 
714 
715 template<int I, int T>
716 LInstruction* LChunkBuilder::DefineFixedDouble(
717  LTemplateInstruction<1, I, T>* instr, DoubleRegister reg) {
718  return Define(instr, ToUnallocated(reg));
719 }
720 
721 
722 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
723  HEnvironment* hydrogen_env = current_block_->last_environment();
724  int argument_index_accumulator = 0;
725  instr->set_environment(CreateEnvironment(hydrogen_env,
726  &argument_index_accumulator));
727  return instr;
728 }
729 
730 
731 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
732  HInstruction* hinstr,
733  CanDeoptimize can_deoptimize) {
734 #ifdef DEBUG
735  instr->VerifyCall();
736 #endif
737  instr->MarkAsCall();
738  instr = AssignPointerMap(instr);
739 
740  if (hinstr->HasObservableSideEffects()) {
741  ASSERT(hinstr->next()->IsSimulate());
742  HSimulate* sim = HSimulate::cast(hinstr->next());
743  ASSERT(instruction_pending_deoptimization_environment_ == NULL);
744  ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
745  instruction_pending_deoptimization_environment_ = instr;
746  pending_deoptimization_ast_id_ = sim->ast_id();
747  }
748 
749  // If the instruction does not have observable side effects, lazy
750  // deoptimization after the call will try to deoptimize to the point
751  // before the call. Thus we still need to attach an environment to the
752  // call even if the call sequence cannot deoptimize eagerly.
753  bool needs_environment =
754  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
755  !hinstr->HasObservableSideEffects();
756  if (needs_environment && !instr->HasEnvironment()) {
757  instr = AssignEnvironment(instr);
758  }
759 
760  return instr;
761 }
762 
763 
764 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
765  ASSERT(!instr->HasPointerMap());
766  instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
767  return instr;
768 }
769 
770 
771 LUnallocated* LChunkBuilder::TempRegister() {
772  LUnallocated* operand =
773  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
774  operand->set_virtual_register(allocator_->GetVirtualRegister());
775  if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
776  return operand;
777 }
778 
779 
780 LOperand* LChunkBuilder::FixedTemp(Register reg) {
781  LUnallocated* operand = ToUnallocated(reg);
782  ASSERT(operand->HasFixedPolicy());
783  return operand;
784 }
785 
786 
787 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
788  LUnallocated* operand = ToUnallocated(reg);
789  ASSERT(operand->HasFixedPolicy());
790  return operand;
791 }
792 
793 
794 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
795  return new(zone()) LLabel(instr->block());
796 }
797 
798 
799 LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
800  return AssignEnvironment(new(zone()) LDeoptimize);
801 }
802 
803 
804 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
805  return AssignEnvironment(new(zone()) LDeoptimize);
806 }
807 
808 
809 LInstruction* LChunkBuilder::DoShift(Token::Value op,
810  HBitwiseBinaryOperation* instr) {
811  if (instr->representation().IsTagged()) {
812  ASSERT(instr->left()->representation().IsTagged());
813  ASSERT(instr->right()->representation().IsTagged());
814 
815  LOperand* left = UseFixed(instr->left(), r1);
816  LOperand* right = UseFixed(instr->right(), r0);
817  LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
818  return MarkAsCall(DefineFixed(result, r0), instr);
819  }
820 
821  ASSERT(instr->representation().IsInteger32());
822  ASSERT(instr->left()->representation().IsInteger32());
823  ASSERT(instr->right()->representation().IsInteger32());
824  LOperand* left = UseRegisterAtStart(instr->left());
825 
826  HValue* right_value = instr->right();
827  LOperand* right = NULL;
828  int constant_value = 0;
829  if (right_value->IsConstant()) {
830  HConstant* constant = HConstant::cast(right_value);
831  right = chunk_->DefineConstantOperand(constant);
832  constant_value = constant->Integer32Value() & 0x1f;
833  } else {
834  right = UseRegisterAtStart(right_value);
835  }
836 
837  // Shift operations can only deoptimize if we do a logical shift
838  // by 0 and the result cannot be truncated to int32.
839  bool may_deopt = (op == Token::SHR && constant_value == 0);
840  bool does_deopt = false;
841  if (may_deopt) {
842  for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
843  if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
844  does_deopt = true;
845  break;
846  }
847  }
848  }
849 
850  LInstruction* result =
851  DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
852  return does_deopt ? AssignEnvironment(result) : result;
853 }
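// Editor's note: a sketch of the deoptimization case guarded above. A logical
// shift right by zero is the only shift whose JavaScript result can leave the
// signed 32-bit range: -1 >>> 0 evaluates to 4294967295, while -1 >>> 1 is
// 2147483647 and still fits. Any non-zero unsigned shift amount (the constant
// is masked to five bits on line 832) therefore keeps the result inside int32,
// so LShiftI only needs a deopt environment when the SHR shift count is zero
// and some use of the result does not truncate back to int32.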
854 
855 
856 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
857  HArithmeticBinaryOperation* instr) {
858  ASSERT(instr->representation().IsDouble());
859  ASSERT(instr->left()->representation().IsDouble());
860  ASSERT(instr->right()->representation().IsDouble());
861  ASSERT(op != Token::MOD);
862  LOperand* left = UseRegisterAtStart(instr->left());
863  LOperand* right = UseRegisterAtStart(instr->right());
864  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
865  return DefineAsRegister(result);
866 }
867 
868 
869 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
870  HArithmeticBinaryOperation* instr) {
871  ASSERT(op == Token::ADD ||
872  op == Token::DIV ||
873  op == Token::MOD ||
874  op == Token::MUL ||
875  op == Token::SUB);
876  HValue* left = instr->left();
877  HValue* right = instr->right();
878  ASSERT(left->representation().IsTagged());
879  ASSERT(right->representation().IsTagged());
880  LOperand* left_operand = UseFixed(left, r1);
881  LOperand* right_operand = UseFixed(right, r0);
882  LArithmeticT* result =
883  new(zone()) LArithmeticT(op, left_operand, right_operand);
884  return MarkAsCall(DefineFixed(result, r0), instr);
885 }
886 
887 
888 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
889  ASSERT(is_building());
890  current_block_ = block;
891  next_block_ = next_block;
892  if (block->IsStartBlock()) {
893  block->UpdateEnvironment(graph_->start_environment());
894  argument_count_ = 0;
895  } else if (block->predecessors()->length() == 1) {
896  // We have a single predecessor => copy environment and outgoing
897  // argument count from the predecessor.
898  ASSERT(block->phis()->length() == 0);
899  HBasicBlock* pred = block->predecessors()->at(0);
900  HEnvironment* last_environment = pred->last_environment();
901  ASSERT(last_environment != NULL);
902  // Only copy the environment, if it is later used again.
903  if (pred->end()->SecondSuccessor() == NULL) {
904  ASSERT(pred->end()->FirstSuccessor() == block);
905  } else {
906  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
907  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
908  last_environment = last_environment->Copy();
909  }
910  }
911  block->UpdateEnvironment(last_environment);
912  ASSERT(pred->argument_count() >= 0);
913  argument_count_ = pred->argument_count();
914  } else {
915  // We are at a state join => process phis.
916  HBasicBlock* pred = block->predecessors()->at(0);
917  // No need to copy the environment, it cannot be used later.
918  HEnvironment* last_environment = pred->last_environment();
919  for (int i = 0; i < block->phis()->length(); ++i) {
920  HPhi* phi = block->phis()->at(i);
921  last_environment->SetValueAt(phi->merged_index(), phi);
922  }
923  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
924  last_environment->SetValueAt(block->deleted_phis()->at(i),
925  graph_->GetConstantUndefined());
926  }
927  block->UpdateEnvironment(last_environment);
928  // Pick up the outgoing argument count of one of the predecessors.
929  argument_count_ = pred->argument_count();
930  }
931  HInstruction* current = block->first();
932  int start = chunk_->instructions()->length();
933  while (current != NULL && !is_aborted()) {
934  // Code for constants in registers is generated lazily.
935  if (!current->EmitAtUses()) {
936  VisitInstruction(current);
937  }
938  current = current->next();
939  }
940  int end = chunk_->instructions()->length() - 1;
941  if (end >= start) {
942  block->set_first_instruction_index(start);
943  block->set_last_instruction_index(end);
944  }
945  block->set_argument_count(argument_count_);
946  next_block_ = NULL;
947  current_block_ = NULL;
948 }
949 
950 
951 void LChunkBuilder::VisitInstruction(HInstruction* current) {
952  HInstruction* old_current = current_instruction_;
953  current_instruction_ = current;
954  if (current->has_position()) position_ = current->position();
955  LInstruction* instr = current->CompileToLithium(this);
956 
957  if (instr != NULL) {
958  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
959  instr = AssignPointerMap(instr);
960  }
961  if (FLAG_stress_environments && !instr->HasEnvironment()) {
962  instr = AssignEnvironment(instr);
963  }
964  instr->set_hydrogen_value(current);
965  chunk_->AddInstruction(instr, current_block_);
966  }
967  current_instruction_ = old_current;
968 }
969 
970 
971 LEnvironment* LChunkBuilder::CreateEnvironment(
972  HEnvironment* hydrogen_env,
973  int* argument_index_accumulator) {
974  if (hydrogen_env == NULL) return NULL;
975 
976  LEnvironment* outer =
977  CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
978  int ast_id = hydrogen_env->ast_id();
979  ASSERT(ast_id != AstNode::kNoNumber ||
980  hydrogen_env->frame_type() != JS_FUNCTION);
981  int value_count = hydrogen_env->length();
982  LEnvironment* result = new(zone()) LEnvironment(
983  hydrogen_env->closure(),
984  hydrogen_env->frame_type(),
985  ast_id,
986  hydrogen_env->parameter_count(),
987  argument_count_,
988  value_count,
989  outer,
990  zone());
991  int argument_index = *argument_index_accumulator;
992  for (int i = 0; i < value_count; ++i) {
993  if (hydrogen_env->is_special_index(i)) continue;
994 
995  HValue* value = hydrogen_env->values()->at(i);
996  LOperand* op = NULL;
997  if (value->IsArgumentsObject()) {
998  op = NULL;
999  } else if (value->IsPushArgument()) {
1000  op = new(zone()) LArgument(argument_index++);
1001  } else {
1002  op = UseAny(value);
1003  }
1004  result->AddValue(op, value->representation());
1005  }
1006 
1007  if (hydrogen_env->frame_type() == JS_FUNCTION) {
1008  *argument_index_accumulator = argument_index;
1009  }
1010 
1011  return result;
1012 }
1013 
1014 
1015 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1016  return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1017 }
1018 
1019 
1020 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1021  HValue* value = instr->value();
1022  if (value->EmitAtUses()) {
1023  HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
1024  ? instr->FirstSuccessor()
1025  : instr->SecondSuccessor();
1026  return new(zone()) LGoto(successor->block_id());
1027  }
1028 
1029  LBranch* result = new(zone()) LBranch(UseRegister(value));
1030  // Tagged values that are not known smis or booleans require a
1031  // deoptimization environment.
1032  Representation rep = value->representation();
1033  HType type = value->type();
1034  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
1035  return AssignEnvironment(result);
1036  }
1037  return result;
1038 }
1039 
1040 
1041 
1042 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1043  ASSERT(instr->value()->representation().IsTagged());
1044  LOperand* value = UseRegisterAtStart(instr->value());
1045  LOperand* temp = TempRegister();
1046  return new(zone()) LCmpMapAndBranch(value, temp);
1047 }
1048 
1049 
1050 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
1051  LOperand* value = UseRegister(instr->value());
1052  return DefineAsRegister(new(zone()) LArgumentsLength(value));
1053 }
1054 
1055 
1056 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1057  return DefineAsRegister(new(zone()) LArgumentsElements);
1058 }
1059 
1060 
1061 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1062  LInstanceOf* result =
1063  new(zone()) LInstanceOf(UseFixed(instr->left(), r0),
1064  UseFixed(instr->right(), r1));
1065  return MarkAsCall(DefineFixed(result, r0), instr);
1066 }
1067 
1068 
1069 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1070  HInstanceOfKnownGlobal* instr) {
1071  LInstanceOfKnownGlobal* result =
1072  new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), r0),
1073  FixedTemp(r4));
1074  return MarkAsCall(DefineFixed(result, r0), instr);
1075 }
1076 
1077 
1078 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1079  LOperand* receiver = UseRegisterAtStart(instr->receiver());
1080  LOperand* function = UseRegisterAtStart(instr->function());
1081  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1082  return AssignEnvironment(DefineSameAsFirst(result));
1083 }
1084 
1085 
1086 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1087  LOperand* function = UseFixed(instr->function(), r1);
1088  LOperand* receiver = UseFixed(instr->receiver(), r0);
1089  LOperand* length = UseFixed(instr->length(), r2);
1090  LOperand* elements = UseFixed(instr->elements(), r3);
1091  LApplyArguments* result = new(zone()) LApplyArguments(function,
1092  receiver,
1093  length,
1094  elements);
1095  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1096 }
1097 
1098 
1099 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1100  ++argument_count_;
1101  LOperand* argument = Use(instr->argument());
1102  return new(zone()) LPushArgument(argument);
1103 }
1104 
1105 
1106 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1107  return instr->HasNoUses()
1108  ? NULL
1109  : DefineAsRegister(new(zone()) LThisFunction);
1110 }
1111 
1112 
1113 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1114  return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
1115 }
1116 
1117 
1118 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1119  LOperand* context = UseRegisterAtStart(instr->value());
1120  return DefineAsRegister(new(zone()) LOuterContext(context));
1121 }
1122 
1123 
1124 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1125  return MarkAsCall(new(zone()) LDeclareGlobals, instr);
1126 }
1127 
1128 
1129 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1130  LOperand* context = UseRegisterAtStart(instr->value());
1131  return DefineAsRegister(new(zone()) LGlobalObject(context));
1132 }
1133 
1134 
1135 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1136  LOperand* global_object = UseRegisterAtStart(instr->value());
1137  return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
1138 }
1139 
1140 
1141 LInstruction* LChunkBuilder::DoCallConstantFunction(
1142  HCallConstantFunction* instr) {
1143  argument_count_ -= instr->argument_count();
1144  return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, r0), instr);
1145 }
1146 
1147 
1148 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1149  LOperand* function = UseFixed(instr->function(), r1);
1150  argument_count_ -= instr->argument_count();
1151  LInvokeFunction* result = new(zone()) LInvokeFunction(function);
1152  return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1153 }
1154 
1155 
1156 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1157  BuiltinFunctionId op = instr->op();
1158  if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
1159  LOperand* input = UseFixedDouble(instr->value(), d2);
1160  LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
1161  return MarkAsCall(DefineFixedDouble(result, d2), instr);
1162  } else if (op == kMathPowHalf) {
1163  LOperand* input = UseFixedDouble(instr->value(), d2);
1164  LOperand* temp = FixedTemp(d3);
1165  LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
1166  return DefineFixedDouble(result, d2);
1167  } else {
1168  LOperand* input = UseRegisterAtStart(instr->value());
1169  LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
1170  LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
1171  switch (op) {
1172  case kMathAbs:
1173  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1174  case kMathFloor:
1175  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1176  case kMathSqrt:
1177  return DefineAsRegister(result);
1178  case kMathRound:
1179  return AssignEnvironment(DefineAsRegister(result));
1180  default:
1181  UNREACHABLE();
1182  return NULL;
1183  }
1184  }
1185 }
1186 
1187 
1188 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1189  ASSERT(instr->key()->representation().IsTagged());
1190  argument_count_ -= instr->argument_count();
1191  LOperand* key = UseFixed(instr->key(), r2);
1192  return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), r0), instr);
1193 }
1194 
1195 
1196 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1197  argument_count_ -= instr->argument_count();
1198  return MarkAsCall(DefineFixed(new(zone()) LCallNamed, r0), instr);
1199 }
1200 
1201 
1202 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1203  argument_count_ -= instr->argument_count();
1204  return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, r0), instr);
1205 }
1206 
1207 
1208 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1209  argument_count_ -= instr->argument_count();
1210  return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, r0), instr);
1211 }
1212 
1213 
1214 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1215  LOperand* constructor = UseFixed(instr->constructor(), r1);
1216  argument_count_ -= instr->argument_count();
1217  LCallNew* result = new(zone()) LCallNew(constructor);
1218  return MarkAsCall(DefineFixed(result, r0), instr);
1219 }
1220 
1221 
1222 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1223  LOperand* function = UseFixed(instr->function(), r1);
1224  argument_count_ -= instr->argument_count();
1225  return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), r0),
1226  instr);
1227 }
1228 
1229 
1230 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1231  argument_count_ -= instr->argument_count();
1232  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, r0), instr);
1233 }
1234 
1235 
1236 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1237  return DoShift(Token::SHR, instr);
1238 }
1239 
1240 
1241 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1242  return DoShift(Token::SAR, instr);
1243 }
1244 
1245 
1246 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1247  return DoShift(Token::SHL, instr);
1248 }
1249 
1250 
1251 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1252  if (instr->representation().IsInteger32()) {
1253  ASSERT(instr->left()->representation().IsInteger32());
1254  ASSERT(instr->right()->representation().IsInteger32());
1255 
1256  LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1257  LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1258  return DefineAsRegister(new(zone()) LBitI(left, right));
1259  } else {
1260  ASSERT(instr->representation().IsTagged());
1261  ASSERT(instr->left()->representation().IsTagged());
1262  ASSERT(instr->right()->representation().IsTagged());
1263 
1264  LOperand* left = UseFixed(instr->left(), r1);
1265  LOperand* right = UseFixed(instr->right(), r0);
1266  LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
1267  return MarkAsCall(DefineFixed(result, r0), instr);
1268  }
1269 }
1270 
1271 
1272 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1273  ASSERT(instr->value()->representation().IsInteger32());
1274  ASSERT(instr->representation().IsInteger32());
1275  if (instr->HasNoUses()) return NULL;
1276  LOperand* value = UseRegisterAtStart(instr->value());
1277  return DefineAsRegister(new(zone()) LBitNotI(value));
1278 }
1279 
1280 
1281 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1282  if (instr->representation().IsDouble()) {
1283  return DoArithmeticD(Token::DIV, instr);
1284  } else if (instr->representation().IsInteger32()) {
1285  // TODO(1042) The fixed register allocation
1286  // is needed because we call TypeRecordingBinaryOpStub from
1287  // the generated code, which requires registers r0
1288  // and r1 to be used. We should remove that
1289  // when we provide a native implementation.
1290  LOperand* dividend = UseFixed(instr->left(), r0);
1291  LOperand* divisor = UseFixed(instr->right(), r1);
1292  return AssignEnvironment(AssignPointerMap(
1293  DefineFixed(new(zone()) LDivI(dividend, divisor), r0)));
1294  } else {
1295  return DoArithmeticT(Token::DIV, instr);
1296  }
1297 }
1298 
1299 
1300 bool LChunkBuilder::HasMagicNumberForDivisor(int32_t divisor) {
1301  uint32_t divisor_abs = abs(divisor);
1302  // Dividing by 0, 1, and powers of 2 is easy.
1303  // Note that IsPowerOf2(0) returns true.
1304  ASSERT(IsPowerOf2(0) == true);
1305  if (IsPowerOf2(divisor_abs)) return true;
1306 
1307  // We have magic numbers for a few specific divisors.
1308  // Details and proofs can be found in:
1309  // - Hacker's Delight, Henry S. Warren, Jr.
1310  // - The PowerPC Compiler Writer’s Guide
1311  // and probably many others.
1312  //
1313  // We handle
1314  // <divisor with magic numbers> * <power of 2>
1315  // but not
1316  // <divisor with magic numbers> * <other divisor with magic numbers>
1317  int32_t power_of_2_factor =
1318  CompilerIntrinsics::CountTrailingZeros(divisor_abs);
1319  DivMagicNumbers magic_numbers =
1320  DivMagicNumberFor(divisor_abs >> power_of_2_factor);
1321  if (magic_numbers.M != InvalidDivMagicNumber.M) return true;
1322 
1323  return false;
1324 }
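// Editor's note: a minimal sketch of the multiply-by-magic-number trick this
// predicate is testing for. The constants below are the classic signed
// divide-by-3 values from Hacker's Delight, not necessarily the table that
// DivMagicNumberFor uses:
//
//   int32_t DivideBy3(int32_t n) {
//     int64_t product = static_cast<int64_t>(n) * 0x55555556LL;
//     int32_t quotient = static_cast<int32_t>(product >> 32);  // high half
//     return quotient - (n >> 31);                             // sign fix-up
//   }
//
// The division turns into a widening multiply plus shifts, which is why the
// flooring-division optimization below only fires for divisors that have such
// constants.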
1325 
1326 
1327 HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
1328  // A value with an integer representation does not need to be transformed.
1329  if (dividend->representation().IsInteger32()) {
1330  return dividend;
1331  // A change from an integer32 can be replaced by the integer32 value.
1332  } else if (dividend->IsChange() &&
1333  HChange::cast(dividend)->from().IsInteger32()) {
1334  return HChange::cast(dividend)->value();
1335  }
1336  return NULL;
1337 }
1338 
1339 
1340 HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) {
1341  // Only optimize when we have magic numbers for the divisor.
1342  // The standard integer division routine is usually slower than transitioning
1343  // to VFP.
1344  if (divisor->IsConstant() &&
1345  HConstant::cast(divisor)->HasInteger32Value()) {
1346  HConstant* constant_val = HConstant::cast(divisor);
1347  int32_t int32_val = constant_val->Integer32Value();
1348  if (LChunkBuilder::HasMagicNumberForDivisor(int32_val)) {
1349  return constant_val->CopyToRepresentation(Representation::Integer32(),
1350  divisor->block()->zone());
1351  }
1352  }
1353  return NULL;
1354 }
1355 
1356 
1357 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1358  HValue* right = instr->right();
1359  LOperand* dividend = UseRegister(instr->left());
1360  LOperand* divisor = UseRegisterOrConstant(right);
1361  LOperand* remainder = TempRegister();
1362  ASSERT(right->IsConstant() &&
1363  HConstant::cast(right)->HasInteger32Value() &&
1364  HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value()));
1365  return AssignEnvironment(DefineAsRegister(
1366  new(zone()) LMathFloorOfDiv(dividend, divisor, remainder)));
1367 }
1368 
1369 
1370 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1371  if (instr->representation().IsInteger32()) {
1372  ASSERT(instr->left()->representation().IsInteger32());
1373  ASSERT(instr->right()->representation().IsInteger32());
1374 
1375  LModI* mod;
1376  if (instr->HasPowerOf2Divisor()) {
1377  ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
1378  LOperand* value = UseRegisterAtStart(instr->left());
1379  mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
1380  } else {
1381  LOperand* dividend = UseRegister(instr->left());
1382  LOperand* divisor = UseRegister(instr->right());
1383  mod = new(zone()) LModI(dividend,
1384  divisor,
1385  TempRegister(),
1386  FixedTemp(d10),
1387  FixedTemp(d11));
1388  }
1389 
1390  if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1391  instr->CheckFlag(HValue::kCanBeDivByZero)) {
1392  return AssignEnvironment(DefineAsRegister(mod));
1393  } else {
1394  return DefineAsRegister(mod);
1395  }
1396  } else if (instr->representation().IsTagged()) {
1397  return DoArithmeticT(Token::MOD, instr);
1398  } else {
1399  ASSERT(instr->representation().IsDouble());
1400  // We call a C function for double modulo. It can't trigger a GC.
1401  // We need to use fixed result register for the call.
1402  // TODO(fschneider): Allow any register as input registers.
1403  LOperand* left = UseFixedDouble(instr->left(), d1);
1404  LOperand* right = UseFixedDouble(instr->right(), d2);
1405  LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
1406  return MarkAsCall(DefineFixedDouble(result, d1), instr);
1407  }
1408 }
1409 
1410 
1411 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1412  if (instr->representation().IsInteger32()) {
1413  ASSERT(instr->left()->representation().IsInteger32());
1414  ASSERT(instr->right()->representation().IsInteger32());
1415  LOperand* left;
1416  LOperand* right = UseOrConstant(instr->MostConstantOperand());
1417  LOperand* temp = NULL;
1418  if (instr->CheckFlag(HValue::kBailoutOnMinusZero) &&
1419  (instr->CheckFlag(HValue::kCanOverflow) ||
1420  !right->IsConstantOperand())) {
1421  left = UseRegister(instr->LeastConstantOperand());
1422  temp = TempRegister();
1423  } else {
1424  left = UseRegisterAtStart(instr->LeastConstantOperand());
1425  }
1426  LMulI* mul = new(zone()) LMulI(left, right, temp);
1427  if (instr->CheckFlag(HValue::kCanOverflow) ||
1428  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1429  AssignEnvironment(mul);
1430  }
1431  return DefineAsRegister(mul);
1432 
1433  } else if (instr->representation().IsDouble()) {
1434  return DoArithmeticD(Token::MUL, instr);
1435 
1436  } else {
1437  return DoArithmeticT(Token::MUL, instr);
1438  }
1439 }
1440 
1441 
1442 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1443  if (instr->representation().IsInteger32()) {
1444  ASSERT(instr->left()->representation().IsInteger32());
1445  ASSERT(instr->right()->representation().IsInteger32());
1446  LOperand* left = UseRegisterAtStart(instr->left());
1447  LOperand* right = UseOrConstantAtStart(instr->right());
1448  LSubI* sub = new(zone()) LSubI(left, right);
1449  LInstruction* result = DefineAsRegister(sub);
1450  if (instr->CheckFlag(HValue::kCanOverflow)) {
1451  result = AssignEnvironment(result);
1452  }
1453  return result;
1454  } else if (instr->representation().IsDouble()) {
1455  return DoArithmeticD(Token::SUB, instr);
1456  } else {
1457  return DoArithmeticT(Token::SUB, instr);
1458  }
1459 }
1460 
1461 
1462 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1463  if (instr->representation().IsInteger32()) {
1464  ASSERT(instr->left()->representation().IsInteger32());
1465  ASSERT(instr->right()->representation().IsInteger32());
1466  LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1467  LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1468  LAddI* add = new(zone()) LAddI(left, right);
1469  LInstruction* result = DefineAsRegister(add);
1470  if (instr->CheckFlag(HValue::kCanOverflow)) {
1471  result = AssignEnvironment(result);
1472  }
1473  return result;
1474  } else if (instr->representation().IsDouble()) {
1475  return DoArithmeticD(Token::ADD, instr);
1476  } else {
1477  ASSERT(instr->representation().IsTagged());
1478  return DoArithmeticT(Token::ADD, instr);
1479  }
1480 }
1481 
1482 
1483 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1484  ASSERT(instr->representation().IsDouble());
1485  // We call a C function for double power. It can't trigger a GC.
1486  // We need to use fixed result register for the call.
1487  Representation exponent_type = instr->right()->representation();
1488  ASSERT(instr->left()->representation().IsDouble());
1489  LOperand* left = UseFixedDouble(instr->left(), d1);
1490  LOperand* right = exponent_type.IsDouble() ?
1491  UseFixedDouble(instr->right(), d2) :
1492  UseFixed(instr->right(), r2);
1493  LPower* result = new(zone()) LPower(left, right);
1494  return MarkAsCall(DefineFixedDouble(result, d3),
1495  instr,
1496  CAN_DEOPTIMIZE_EAGERLY);
1497 }
1498 
1499 
1500 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
1501  ASSERT(instr->representation().IsDouble());
1502  ASSERT(instr->global_object()->representation().IsTagged());
1503  LOperand* global_object = UseFixed(instr->global_object(), r0);
1504  LRandom* result = new(zone()) LRandom(global_object);
1505  return MarkAsCall(DefineFixedDouble(result, d7), instr);
1506 }
1507 
1508 
1509 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1510  ASSERT(instr->left()->representation().IsTagged());
1511  ASSERT(instr->right()->representation().IsTagged());
1512  LOperand* left = UseFixed(instr->left(), r1);
1513  LOperand* right = UseFixed(instr->right(), r0);
1514  LCmpT* result = new(zone()) LCmpT(left, right);
1515  return MarkAsCall(DefineFixed(result, r0), instr);
1516 }
1517 
1518 
1519 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1520  HCompareIDAndBranch* instr) {
1521  Representation r = instr->GetInputRepresentation();
1522  if (r.IsInteger32()) {
1523  ASSERT(instr->left()->representation().IsInteger32());
1524  ASSERT(instr->right()->representation().IsInteger32());
1525  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1526  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1527  return new(zone()) LCmpIDAndBranch(left, right);
1528  } else {
1529  ASSERT(r.IsDouble());
1530  ASSERT(instr->left()->representation().IsDouble());
1531  ASSERT(instr->right()->representation().IsDouble());
1532  LOperand* left = UseRegisterAtStart(instr->left());
1533  LOperand* right = UseRegisterAtStart(instr->right());
1534  return new(zone()) LCmpIDAndBranch(left, right);
1535  }
1536 }
1537 
1538 
1539 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1540  HCompareObjectEqAndBranch* instr) {
1541  LOperand* left = UseRegisterAtStart(instr->left());
1542  LOperand* right = UseRegisterAtStart(instr->right());
1543  return new(zone()) LCmpObjectEqAndBranch(left, right);
1544 }
1545 
1546 
1547 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1548  HCompareConstantEqAndBranch* instr) {
1549  LOperand* value = UseRegisterAtStart(instr->value());
1550  return new(zone()) LCmpConstantEqAndBranch(value);
1551 }
1552 
1553 
1554 LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
1555  ASSERT(instr->value()->representation().IsTagged());
1556  return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
1557 }
1558 
1559 
1560 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1561  ASSERT(instr->value()->representation().IsTagged());
1562  LOperand* value = UseRegisterAtStart(instr->value());
1563  LOperand* temp = TempRegister();
1564  return new(zone()) LIsObjectAndBranch(value, temp);
1565 }
1566 
1567 
1568 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1569  ASSERT(instr->value()->representation().IsTagged());
1570  LOperand* value = UseRegisterAtStart(instr->value());
1571  LOperand* temp = TempRegister();
1572  return new(zone()) LIsStringAndBranch(value, temp);
1573 }
1574 
1575 
1576 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1577  ASSERT(instr->value()->representation().IsTagged());
1578  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1579 }
1580 
1581 
1582 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1583  HIsUndetectableAndBranch* instr) {
1584  ASSERT(instr->value()->representation().IsTagged());
1585  LOperand* value = UseRegisterAtStart(instr->value());
1586  return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1587 }
1588 
1589 
1590 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1591  HStringCompareAndBranch* instr) {
1592  ASSERT(instr->left()->representation().IsTagged());
1593  ASSERT(instr->right()->representation().IsTagged());
1594  LOperand* left = UseFixed(instr->left(), r1);
1595  LOperand* right = UseFixed(instr->right(), r0);
1596  LStringCompareAndBranch* result =
1597  new(zone()) LStringCompareAndBranch(left, right);
1598  return MarkAsCall(result, instr);
1599 }
1600 
1601 
1602 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1603  HHasInstanceTypeAndBranch* instr) {
1604  ASSERT(instr->value()->representation().IsTagged());
1605  LOperand* value = UseRegisterAtStart(instr->value());
1606  return new(zone()) LHasInstanceTypeAndBranch(value);
1607 }
1608 
1609 
1610 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1611  HGetCachedArrayIndex* instr) {
1612  ASSERT(instr->value()->representation().IsTagged());
1613  LOperand* value = UseRegisterAtStart(instr->value());
1614 
1615  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1616 }
1617 
1618 
1619 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1620  HHasCachedArrayIndexAndBranch* instr) {
1621  ASSERT(instr->value()->representation().IsTagged());
1622  return new(zone()) LHasCachedArrayIndexAndBranch(
1623  UseRegisterAtStart(instr->value()));
1624 }
1625 
1626 
1627 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1628  HClassOfTestAndBranch* instr) {
1629  ASSERT(instr->value()->representation().IsTagged());
1630  LOperand* value = UseRegister(instr->value());
1631  return new(zone()) LClassOfTestAndBranch(value, TempRegister());
1632 }
1633 
1634 
1635 LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
1636  LOperand* array = UseRegisterAtStart(instr->value());
1637  return DefineAsRegister(new(zone()) LJSArrayLength(array));
1638 }
1639 
1640 
1641 LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
1642  HFixedArrayBaseLength* instr) {
1643  LOperand* array = UseRegisterAtStart(instr->value());
1644  return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
1645 }
1646 
1647 
1648 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1649  LOperand* object = UseRegisterAtStart(instr->value());
1650  return DefineAsRegister(new(zone()) LElementsKind(object));
1651 }
1652 
1653 
1654 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1655  LOperand* object = UseRegister(instr->value());
1656  LValueOf* result = new(zone()) LValueOf(object, TempRegister());
1657  return DefineAsRegister(result);
1658 }
1659 
1660 
1661 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1662  LOperand* object = UseFixed(instr->value(), r0);
1663  LDateField* result =
1664  new(zone()) LDateField(object, FixedTemp(r1), instr->index());
1665  return MarkAsCall(DefineFixed(result, r0), instr);
1666 }
1667 
1668 
1669 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1670  LOperand* value = UseRegisterAtStart(instr->index());
1671  LOperand* length = UseRegister(instr->length());
1672  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1673 }
1674 
1675 
1676 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1677  // The control instruction marking the end of a block that completed
1678  // abruptly (e.g., threw an exception). There is nothing specific to do.
1679  return NULL;
1680 }
1681 
1682 
1683 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1684  LOperand* value = UseFixed(instr->value(), r0);
1685  return MarkAsCall(new(zone()) LThrow(value), instr);
1686 }
1687 
1688 
1689 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1690  return NULL;
1691 }
1692 
1693 
1694 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1695  // All HForceRepresentation instructions should be eliminated in the
1696  // representation change phase of Hydrogen.
1697  UNREACHABLE();
1698  return NULL;
1699 }
1700 
1701 
1702 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1703  Representation from = instr->from();
1704  Representation to = instr->to();
1705  if (from.IsTagged()) {
1706  if (to.IsDouble()) {
1707  LOperand* value = UseRegister(instr->value());
1708  LNumberUntagD* res = new(zone()) LNumberUntagD(value);
1709  return AssignEnvironment(DefineAsRegister(res));
1710  } else {
1711  ASSERT(to.IsInteger32());
1712  LOperand* value = UseRegisterAtStart(instr->value());
1713  LInstruction* res = NULL;
1714  if (instr->value()->type().IsSmi()) {
1715  res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
1716  } else {
1717  LOperand* temp1 = TempRegister();
1718  LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
1719  : NULL;
1720  LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(d11)
1721  : NULL;
1722  res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
1723  temp1,
1724  temp2,
1725  temp3));
1726  res = AssignEnvironment(res);
1727  }
1728  return res;
1729  }
1730  } else if (from.IsDouble()) {
1731  if (to.IsTagged()) {
1732  LOperand* value = UseRegister(instr->value());
1733  LOperand* temp1 = TempRegister();
1734  LOperand* temp2 = TempRegister();
1735 
1736  // Make sure that the temp and result_temp registers are
1737  // different.
1738  LUnallocated* result_temp = TempRegister();
1739  LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1740  Define(result, result_temp);
1741  return AssignPointerMap(result);
1742  } else {
1743  ASSERT(to.IsInteger32());
1744  LOperand* value = UseRegister(instr->value());
1745  LOperand* temp1 = TempRegister();
1746  LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
1747  LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
1748  return AssignEnvironment(DefineAsRegister(res));
1749  }
1750  } else if (from.IsInteger32()) {
1751  if (to.IsTagged()) {
1752  HValue* val = instr->value();
1753  LOperand* value = UseRegisterAtStart(val);
1754  if (val->HasRange() && val->range()->IsInSmiRange()) {
1755  return DefineAsRegister(new(zone()) LSmiTag(value));
1756  } else {
1757  LNumberTagI* result = new(zone()) LNumberTagI(value);
1758  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1759  }
1760  } else {
1761  ASSERT(to.IsDouble());
1762  LOperand* value = Use(instr->value());
1763  return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
1764  }
1765  }
1766  UNREACHABLE();
1767  return NULL;
1768 }
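// Illustrative sketch (hypothetical helpers, not part of the original file):
// on 32-bit targets such as ARM a smi keeps the integer in the upper 31 bits
// with a zero tag bit, so LSmiTag and LSmiUntag reduce to a one-bit shift.
static inline int SketchSmiTag(int value) { return value << 1; }
static inline int SketchSmiUntag(int smi) { return smi >> 1; }
// Tagging without an overflow check is only safe when the value is known to
// fit in 31 bits, which is why DoChange above consults
// val->range()->IsInSmiRange() before preferring LSmiTag over LNumberTagI.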
1769 
1770 
1771 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1772  LOperand* value = UseRegisterAtStart(instr->value());
1773  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1774 }
1775 
1776 
1777 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1778  LOperand* value = UseRegisterAtStart(instr->value());
1779  LInstruction* result = new(zone()) LCheckInstanceType(value);
1780  return AssignEnvironment(result);
1781 }
1782 
1783 
1784 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1785  LOperand* temp1 = TempRegister();
1786  LOperand* temp2 = TempRegister();
1787  LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
1788  return AssignEnvironment(result);
1789 }
1790 
1791 
1792 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1793  LOperand* value = UseRegisterAtStart(instr->value());
1794  return AssignEnvironment(new(zone()) LCheckSmi(value));
1795 }
1796 
1797 
1798 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1799  LOperand* value = UseRegisterAtStart(instr->value());
1800  return AssignEnvironment(new(zone()) LCheckFunction(value));
1801 }
1802 
1803 
1804 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1805  LOperand* value = UseRegisterAtStart(instr->value());
1806  LInstruction* result = new(zone()) LCheckMaps(value);
1807  return AssignEnvironment(result);
1808 }
1809 
1810 
1811 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1812  HValue* value = instr->value();
1813  Representation input_rep = value->representation();
1814  LOperand* reg = UseRegister(value);
1815  if (input_rep.IsDouble()) {
1816  return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(d11)));
1817  } else if (input_rep.IsInteger32()) {
1818  return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1819  } else {
1820  ASSERT(input_rep.IsTagged());
1821  // Register allocator doesn't (yet) support allocation of double
 1822  // temps. Reserve d11 explicitly.
1823  LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(d11));
1824  return AssignEnvironment(DefineAsRegister(result));
1825  }
1826 }
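// Illustrative sketch (hypothetical helper, not part of the original file):
// the integer path above (LClampIToUint8) clamps its input to the [0, 255]
// range; the double and tagged paths do the same after a conversion and need
// the fixed double temp reserved above.
static inline int SketchClampToUint8(int value) {
  if (value < 0) return 0;
  if (value > 255) return 255;
  return value;
}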
1827 
1828 
1829 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1830  return new(zone()) LReturn(UseFixed(instr->value(), r0));
1831 }
1832 
1833 
1834 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1835  Representation r = instr->representation();
1836  if (r.IsInteger32()) {
1837  return DefineAsRegister(new(zone()) LConstantI);
1838  } else if (r.IsDouble()) {
1839  return DefineAsRegister(new(zone()) LConstantD);
1840  } else if (r.IsTagged()) {
1841  return DefineAsRegister(new(zone()) LConstantT);
1842  } else {
1843  UNREACHABLE();
1844  return NULL;
1845  }
1846 }
1847 
1848 
1849 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1850  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1851  return instr->RequiresHoleCheck()
1852  ? AssignEnvironment(DefineAsRegister(result))
1853  : DefineAsRegister(result);
1854 }
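// The RequiresHoleCheck() pattern above (and in several builders below)
// attaches a deoptimization environment exactly when the loaded cell or slot
// may still contain the hole value, for example because the global property
// can be deleted; observing the hole then triggers a deopt rather than
// producing a bogus result.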
1855 
1856 
1857 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1858  LOperand* global_object = UseFixed(instr->global_object(), r0);
1859  LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
1860  return MarkAsCall(DefineFixed(result, r0), instr);
1861 }
1862 
1863 
1864 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1865  LOperand* value = UseRegister(instr->value());
 1866  // Use a temp register to check the value in the cell when we perform
 1867  // a hole check.
1868  return instr->RequiresHoleCheck()
1869  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
1870  : new(zone()) LStoreGlobalCell(value, NULL);
1871 }
1872 
1873 
1874 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1875  LOperand* global_object = UseFixed(instr->global_object(), r1);
1876  LOperand* value = UseFixed(instr->value(), r0);
1877  LStoreGlobalGeneric* result =
1878  new(zone()) LStoreGlobalGeneric(global_object, value);
1879  return MarkAsCall(result, instr);
1880 }
1881 
1882 
1883 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1884  LOperand* context = UseRegisterAtStart(instr->value());
1885  LInstruction* result =
1886  DefineAsRegister(new(zone()) LLoadContextSlot(context));
1887  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1888 }
1889 
1890 
1891 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
1892  LOperand* context;
1893  LOperand* value;
1894  if (instr->NeedsWriteBarrier()) {
1895  context = UseTempRegister(instr->context());
1896  value = UseTempRegister(instr->value());
1897  } else {
1898  context = UseRegister(instr->context());
1899  value = UseRegister(instr->value());
1900  }
1901  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
1902  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1903 }
1904 
1905 
1906 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1907  return DefineAsRegister(
1908  new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
1909 }
1910 
1911 
1912 LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
1913  HLoadNamedFieldPolymorphic* instr) {
1914  ASSERT(instr->representation().IsTagged());
1915  if (instr->need_generic()) {
1916  LOperand* obj = UseFixed(instr->object(), r0);
1917  LLoadNamedFieldPolymorphic* result =
1918  new(zone()) LLoadNamedFieldPolymorphic(obj);
1919  return MarkAsCall(DefineFixed(result, r0), instr);
1920  } else {
1921  LOperand* obj = UseRegisterAtStart(instr->object());
1922  LLoadNamedFieldPolymorphic* result =
1923  new(zone()) LLoadNamedFieldPolymorphic(obj);
1924  return AssignEnvironment(DefineAsRegister(result));
1925  }
1926 }
1927 
1928 
1929 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1930  LOperand* object = UseFixed(instr->object(), r0);
1931  LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), r0);
1932  return MarkAsCall(result, instr);
1933 }
1934 
1935 
1936 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1937  HLoadFunctionPrototype* instr) {
1938  return AssignEnvironment(DefineAsRegister(
1939  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
1940 }
1941 
1942 
1943 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1944  LOperand* input = UseRegisterAtStart(instr->value());
1945  return DefineAsRegister(new(zone()) LLoadElements(input));
1946 }
1947 
1948 
1949 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1950  HLoadExternalArrayPointer* instr) {
1951  LOperand* input = UseRegisterAtStart(instr->value());
1952  return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
1953 }
1954 
1955 
1956 LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
1957  HLoadKeyedFastElement* instr) {
1958  ASSERT(instr->representation().IsTagged());
1959  ASSERT(instr->key()->representation().IsInteger32());
1960  LOperand* obj = UseRegisterAtStart(instr->object());
1961  LOperand* key = UseRegisterAtStart(instr->key());
1962  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
1963  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
1964  return DefineAsRegister(result);
1965 }
1966 
1967 
1968 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1969  HLoadKeyedFastDoubleElement* instr) {
1970  ASSERT(instr->representation().IsDouble());
1971  ASSERT(instr->key()->representation().IsInteger32());
1972  LOperand* elements = UseTempRegister(instr->elements());
1973  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1974  LLoadKeyedFastDoubleElement* result =
1975  new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1976  return AssignEnvironment(DefineAsRegister(result));
1977 }
1978 
1979 
1980 LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
1981  HLoadKeyedSpecializedArrayElement* instr) {
1982  ElementsKind elements_kind = instr->elements_kind();
1983  ASSERT(
1984  (instr->representation().IsInteger32() &&
1985  (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
1986  (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
1987  (instr->representation().IsDouble() &&
1988  ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
1989  (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
1990  ASSERT(instr->key()->representation().IsInteger32());
1991  LOperand* external_pointer = UseRegister(instr->external_pointer());
1992  LOperand* key = UseRegisterOrConstant(instr->key());
1993  LLoadKeyedSpecializedArrayElement* result =
1994  new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
1995  LInstruction* load_instr = DefineAsRegister(result);
 1996  // An unsigned int array load might overflow and cause a deopt, so make sure
 1997  // the instruction has an environment.
1998  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
1999  AssignEnvironment(load_instr) : load_instr;
2000 }
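// The overflow mentioned above: the result is defined as an Integer32, but an
// EXTERNAL_UNSIGNED_INT_ELEMENTS load can yield values above kMaxInt
// (e.g. 0x80000000) that have no int32 representation, so such loads carry a
// deopt environment while the other external kinds do not.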
2001 
2002 
2003 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2004  LOperand* object = UseFixed(instr->object(), r1);
2005  LOperand* key = UseFixed(instr->key(), r0);
2006 
2007  LInstruction* result =
2008  DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), r0);
2009  return MarkAsCall(result, instr);
2010 }
2011 
2012 
2013 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
2014  HStoreKeyedFastElement* instr) {
2015  bool needs_write_barrier = instr->NeedsWriteBarrier();
2016  ASSERT(instr->value()->representation().IsTagged());
2017  ASSERT(instr->object()->representation().IsTagged());
2018  ASSERT(instr->key()->representation().IsInteger32());
2019 
2020  LOperand* obj = UseTempRegister(instr->object());
2021  LOperand* val = needs_write_barrier
2022  ? UseTempRegister(instr->value())
2023  : UseRegisterAtStart(instr->value());
2024  LOperand* key = needs_write_barrier
2025  ? UseTempRegister(instr->key())
2026  : UseRegisterOrConstantAtStart(instr->key());
2027  return new(zone()) LStoreKeyedFastElement(obj, key, val);
2028 }
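// When a write barrier is required, object, key, and value are all placed in
// temp registers; presumably this is because the write-barrier code is allowed
// to clobber the registers it is handed, so they must not be shared with
// operands that are still live after the store.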
2029 
2030 
2031 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
2032  HStoreKeyedFastDoubleElement* instr) {
2033  ASSERT(instr->value()->representation().IsDouble());
2034  ASSERT(instr->elements()->representation().IsTagged());
2035  ASSERT(instr->key()->representation().IsInteger32());
2036 
2037  LOperand* elements = UseRegisterAtStart(instr->elements());
2038  LOperand* val = UseTempRegister(instr->value());
2039  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2040 
2041  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
2042 }
2043 
2044 
2045 LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
2046  HStoreKeyedSpecializedArrayElement* instr) {
2047  ElementsKind elements_kind = instr->elements_kind();
2048  ASSERT(
2049  (instr->value()->representation().IsInteger32() &&
2050  (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
2051  (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
2052  (instr->value()->representation().IsDouble() &&
2053  ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
2054  (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
2055  ASSERT(instr->external_pointer()->representation().IsExternal());
2056  ASSERT(instr->key()->representation().IsInteger32());
2057 
2058  LOperand* external_pointer = UseRegister(instr->external_pointer());
2059  bool val_is_temp_register =
2060  elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
2061  elements_kind == EXTERNAL_FLOAT_ELEMENTS;
2062  LOperand* val = val_is_temp_register
2063  ? UseTempRegister(instr->value())
2064  : UseRegister(instr->value());
2065  LOperand* key = UseRegisterOrConstant(instr->key());
2066 
2067  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
2068  key,
2069  val);
2070 }
2071 
2072 
2073 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2074  LOperand* obj = UseFixed(instr->object(), r2);
2075  LOperand* key = UseFixed(instr->key(), r1);
2076  LOperand* val = UseFixed(instr->value(), r0);
2077 
2078  ASSERT(instr->object()->representation().IsTagged());
2079  ASSERT(instr->key()->representation().IsTagged());
2080  ASSERT(instr->value()->representation().IsTagged());
2081 
2082  return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
2083 }
2084 
2085 
2086 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2087  HTransitionElementsKind* instr) {
2088  ElementsKind from_kind = instr->original_map()->elements_kind();
2089  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
2090  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
2091  LOperand* object = UseRegister(instr->object());
2092  LOperand* new_map_reg = TempRegister();
2093  LTransitionElementsKind* result =
2094  new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
2095  return DefineSameAsFirst(result);
2096  } else {
2097  LOperand* object = UseFixed(instr->object(), r0);
2098  LOperand* fixed_object_reg = FixedTemp(r2);
2099  LOperand* new_map_reg = FixedTemp(r3);
2100  LTransitionElementsKind* result =
2101  new(zone()) LTransitionElementsKind(object,
2102  new_map_reg,
2103  fixed_object_reg);
2104  return MarkAsCall(DefineFixed(result, r0), instr);
2105  }
2106 }
2107 
2108 
2109 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2110  bool needs_write_barrier = instr->NeedsWriteBarrier();
2111  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
2112  instr->NeedsWriteBarrierForMap();
2113 
2114  LOperand* obj;
2115  if (needs_write_barrier) {
2116  obj = instr->is_in_object()
2117  ? UseRegister(instr->object())
2118  : UseTempRegister(instr->object());
2119  } else {
2120  obj = needs_write_barrier_for_map
2121  ? UseRegister(instr->object())
2122  : UseRegisterAtStart(instr->object());
2123  }
2124 
2125  LOperand* val = needs_write_barrier
2126  ? UseTempRegister(instr->value())
2127  : UseRegister(instr->value());
2128 
 2129  // We need a temporary register for the write barrier of the map field.
2130  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
2131 
2132  return new(zone()) LStoreNamedField(obj, val, temp);
2133 }
2134 
2135 
2136 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2137  LOperand* obj = UseFixed(instr->object(), r1);
2138  LOperand* val = UseFixed(instr->value(), r0);
2139 
2140  LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
2141  return MarkAsCall(result, instr);
2142 }
2143 
2144 
2145 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2146  LOperand* left = UseRegisterAtStart(instr->left());
2147  LOperand* right = UseRegisterAtStart(instr->right());
2148  return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), r0),
2149  instr);
2150 }
2151 
2152 
2153 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2154  LOperand* string = UseTempRegister(instr->string());
2155  LOperand* index = UseTempRegister(instr->index());
2156  LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
2157  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2158 }
2159 
2160 
2161 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2162  LOperand* char_code = UseRegister(instr->value());
2163  LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
2164  return AssignPointerMap(DefineAsRegister(result));
2165 }
2166 
2167 
2168 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2169  LOperand* string = UseRegisterAtStart(instr->value());
2170  return DefineAsRegister(new(zone()) LStringLength(string));
2171 }
2172 
2173 
2174 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2175  LAllocateObject* result =
2176  new(zone()) LAllocateObject(TempRegister(), TempRegister());
2177  return AssignPointerMap(DefineAsRegister(result));
2178 }
2179 
2180 
2181 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2182  return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, r0), instr);
2183 }
2184 
2185 
2186 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2187  return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, r0), instr);
2188 }
2189 
2190 
2191 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2192  return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, r0), instr);
2193 }
2194 
2195 
2196 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2197  return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, r0), instr);
2198 }
2199 
2200 
2201 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2202  return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, r0), instr);
2203 }
2204 
2205 
2206 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2207  LOperand* object = UseFixed(instr->object(), r0);
2208  LOperand* key = UseFixed(instr->key(), r1);
2209  LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
2210  return MarkAsCall(DefineFixed(result, r0), instr);
2211 }
2212 
2213 
2214 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2215  allocator_->MarkAsOsrEntry();
2216  current_block_->last_environment()->set_ast_id(instr->ast_id());
2217  return AssignEnvironment(new(zone()) LOsrEntry);
2218 }
2219 
2220 
2221 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2222  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2223  return DefineAsSpilled(new(zone()) LParameter, spill_index);
2224 }
2225 
2226 
2227 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2228  int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
2229  if (spill_index > LUnallocated::kMaxFixedIndex) {
2230  Abort("Too many spill slots needed for OSR");
2231  spill_index = 0;
2232  }
2233  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2234 }
2235 
2236 
2237 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2238  argument_count_ -= instr->argument_count();
2239  return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr);
2240 }
2241 
2242 
2243 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2244  // There are no real uses of the arguments object.
2245  // arguments.length and element access are supported directly on
2246  // stack arguments, and any real arguments object use causes a bailout.
2247  // So this value is never used.
2248  return NULL;
2249 }
2250 
2251 
2252 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2253  LOperand* arguments = UseRegister(instr->arguments());
2254  LOperand* length = UseTempRegister(instr->length());
2255  LOperand* index = UseRegister(instr->index());
2256  LAccessArgumentsAt* result =
2257  new(zone()) LAccessArgumentsAt(arguments, length, index);
2258  return AssignEnvironment(DefineAsRegister(result));
2259 }
2260 
2261 
2262 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2263  LOperand* object = UseFixed(instr->value(), r0);
2264  LToFastProperties* result = new(zone()) LToFastProperties(object);
2265  return MarkAsCall(DefineFixed(result, r0), instr);
2266 }
2267 
2268 
2269 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2270  LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), r0));
2271  return MarkAsCall(DefineFixed(result, r0), instr);
2272 }
2273 
2274 
2275 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2276  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2277 }
2278 
2279 
2280 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2281  HIsConstructCallAndBranch* instr) {
2282  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2283 }
2284 
2285 
2286 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2287  HEnvironment* env = current_block_->last_environment();
2288  ASSERT(env != NULL);
2289 
2290  env->set_ast_id(instr->ast_id());
2291 
2292  env->Drop(instr->pop_count());
2293  for (int i = 0; i < instr->values()->length(); ++i) {
2294  HValue* value = instr->values()->at(i);
2295  if (instr->HasAssignedIndexAt(i)) {
2296  env->Bind(instr->GetAssignedIndexAt(i), value);
2297  } else {
2298  env->Push(value);
2299  }
2300  }
2301 
 2302  // If there is an instruction with a pending deoptimization environment, create
 2303  // a lazy bailout instruction to capture the environment.
2304  if (pending_deoptimization_ast_id_ == instr->ast_id()) {
2305  LInstruction* result = new(zone()) LLazyBailout;
2306  result = AssignEnvironment(result);
2307  // Store the lazy deopt environment with the instruction if needed. Right
2308  // now it is only used for LInstanceOfKnownGlobal.
2309  instruction_pending_deoptimization_environment_->
2310  SetDeferredLazyDeoptimizationEnvironment(result->environment());
2311  instruction_pending_deoptimization_environment_ = NULL;
2312  pending_deoptimization_ast_id_ = AstNode::kNoNumber;
2313  return result;
2314  }
2315 
2316  return NULL;
2317 }
2318 
2319 
2320 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2321  if (instr->is_function_entry()) {
2322  return MarkAsCall(new(zone()) LStackCheck, instr);
2323  } else {
2324  ASSERT(instr->is_backwards_branch());
2325  return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
2326  }
2327 }
2328 
2329 
2330 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2331  HEnvironment* outer = current_block_->last_environment();
2332  HConstant* undefined = graph()->GetConstantUndefined();
2333  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2334  instr->arguments_count(),
2335  instr->function(),
2336  undefined,
2337  instr->call_kind(),
2338  instr->is_construct());
2339  if (instr->arguments_var() != NULL) {
2340  inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
2341  }
2342  current_block_->UpdateEnvironment(inner);
2343  chunk_->AddInlinedClosure(instr->closure());
2344  return NULL;
2345 }
2346 
2347 
2348 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2349  LInstruction* pop = NULL;
2350 
2351  HEnvironment* env = current_block_->last_environment();
2352 
2353  if (instr->arguments_pushed()) {
2354  int argument_count = env->arguments_environment()->parameter_count();
2355  pop = new(zone()) LDrop(argument_count);
2356  argument_count_ -= argument_count;
2357  }
2358 
2359  HEnvironment* outer = current_block_->last_environment()->
2360  DiscardInlined(false);
2361  current_block_->UpdateEnvironment(outer);
2362 
2363  return pop;
2364 }
2365 
2366 
2367 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2368  LOperand* key = UseRegisterAtStart(instr->key());
2369  LOperand* object = UseRegisterAtStart(instr->object());
2370  LIn* result = new(zone()) LIn(key, object);
2371  return MarkAsCall(DefineFixed(result, r0), instr);
2372 }
2373 
2374 
2375 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2376  LOperand* object = UseFixed(instr->enumerable(), r0);
2377  LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
2378  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
2379 }
2380 
2381 
2382 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2383  LOperand* map = UseRegister(instr->map());
2384  return AssignEnvironment(DefineAsRegister(
2385  new(zone()) LForInCacheArray(map)));
2386 }
2387 
2388 
2389 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2390  LOperand* value = UseRegisterAtStart(instr->value());
2391  LOperand* map = UseRegisterAtStart(instr->map());
2392  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2393 }
2394 
2395 
2396 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2397  LOperand* object = UseRegister(instr->object());
2398  LOperand* index = UseRegister(instr->index());
2399  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2400 }
2401 
2402 
2403 } } // namespace v8::internal
HValue * LookupValue(int id) const
Definition: hydrogen.h:310
#define DEFINE_COMPILE(type)
Definition: lithium-arm.cc:37
int index() const
Definition: lithium.h:62
const DwVfpRegister d11
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:196
static LGap * cast(LInstruction *instr)
Definition: lithium-arm.h:318
static LConstantOperand * Create(int index, Zone *zone)
Definition: lithium.h:263
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:305
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:219
Handle< Object > name() const
Definition: lithium-arm.h:1726
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
int GetParameterStackSlot(int index) const
Definition: lithium-arm.cc:497
const Register r3
const DivMagicNumbers DivMagicNumberFor(int32_t divisor)
Definition: utils.cc:93
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:120
Token::Value op() const
Definition: lithium-arm.h:1117
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:84
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
Definition: lithium-arm.h:336
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:279
int ParameterAt(int index)
Definition: lithium-arm.cc:508
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, 
true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information 
(implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") 
DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) 
DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 
0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file 
in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") 
DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:226
static Representation Integer32()
Handle< String > name() const
Definition: lithium-arm.h:1542
static const int kNoNumber
Definition: ast.h:197
int int32_t
Definition: unicode.cc:47
static const int kNumAllocatableRegisters
Handle< Object > name() const
Definition: lithium-arm.h:1705
LEnvironment * environment() const
Definition: lithium-arm.h:240
Token::Value op() const
Definition: lithium-arm.h:610
#define ASSERT(condition)
Definition: checks.h:270
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:111
const Register r2
void PrintTo(StringStream *stream)
Definition: lithium.cc:203
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:49
LChunk(CompilationInfo *info, HGraph *graph)
Definition: lithium-arm.cc:410
Representation representation() const
EqualityKind kind() const
Definition: lithium-arm.h:668
int last_instruction_index() const
Definition: hydrogen.h:90
LGap * GetGapAt(int index) const
Definition: lithium-arm.cc:515
void Add(Vector< const char > format, Vector< FmtElm > elms)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:368
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:293
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:299
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:50
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:324
DwVfpRegister DoubleRegister
int GetNextSpillIndex(bool is_double)
Definition: lithium-arm.cc:420
void PrintTo(StringStream *stream)
Definition: lithium.cc:158
Zone * zone() const
Definition: hydrogen.h:250
LLabel * replacement() const
Definition: lithium-arm.h:410
static HUnaryOperation * cast(HValue *value)
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:156
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:201
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:330
bool is_loop_header() const
Definition: lithium-arm.h:408
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:54
LOperand * GetNextSpillSlot(bool is_double)
Definition: lithium-arm.cc:427
void AddMove(LOperand *from, LOperand *to, Zone *zone)
Definition: lithium.h:401
static const char * String(Value tok)
Definition: token.h:275
const int kPointerSize
Definition: globals.h:234
static LDoubleStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:324
const DwVfpRegister d7
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:336
int block_id() const
Definition: lithium-arm.h:407
bool HasEnvironment() const
Definition: lithium-arm.h:241
static void VPrint(const char *format, va_list args)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:341
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:233
static int ToAllocationIndex(Register reg)
Definition: assembler-arm.h:77
Zone * zone() const
Definition: lithium-arm.h:2275
const DwVfpRegister d3
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:395
const Register r0
bool IsPowerOf2(T x)
Definition: utils.h:50
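IsPowerOf2(T x) is a small utility predicate. The standalone sketch below shows the usual x & (x - 1) bit trick; V8's actual utils.h definition may differ in detail (for example in how it treats zero), so this is only an illustration.

#include <cassert>
#include <cstdint>

// A power of two has exactly one bit set, so clearing the lowest set bit
// with x & (x - 1) yields zero. Zero is excluded explicitly here.
template <typename T>
inline bool IsPowerOf2Sketch(T x) {
  return x != 0 && (x & (x - 1)) == 0;
}

int main() {
  assert(IsPowerOf2Sketch<uint32_t>(8));    // 0b1000 -> true
  assert(!IsPowerOf2Sketch<uint32_t>(12));  // 0b1100 -> false
  assert(!IsPowerOf2Sketch<uint32_t>(0));   // zero is not a power of two here
  return 0;
}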
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:92
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:125
static LStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:299
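LStackSlot::Create(int index, Zone *zone) and LDoubleStackSlot::Create(int index, Zone *zone) are zone-allocated factory functions. The standalone sketch below shows the general arena-plus-factory idea with simplified stand-in types (the Zone here is a toy allocator and StackSlotSketch is hypothetical); it is not V8's implementation.

#include <cstddef>
#include <cstdio>
#include <new>
#include <vector>

// Toy arena: memory is handed out on demand and released all at once
// when the zone is destroyed.
class Zone {
 public:
  ~Zone() { for (void* block : blocks_) ::operator delete(block); }
  void* New(size_t size) {
    void* block = ::operator new(size);
    blocks_.push_back(block);
    return block;
  }
 private:
  std::vector<void*> blocks_;
};

// Operand-like type whose only way to be built is the Create(index, zone)
// factory, mirroring the Create pattern listed above.
class StackSlotSketch {
 public:
  static StackSlotSketch* Create(int index, Zone* zone) {
    return new (zone->New(sizeof(StackSlotSketch))) StackSlotSketch(index);
  }
  int index() const { return index_; }
 private:
  explicit StackSlotSketch(int index) : index_(index) {}
  int index_;
};

int main() {
  Zone zone;
  StackSlotSketch* slot = StackSlotSketch::Create(3, &zone);
  std::printf("stack slot index: %d\n", slot->index());
  return 0;  // the zone destructor releases the slot's storage
}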
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:359
static const int kMaxFixedIndex
Definition: lithium.h:157
bool IsGapAt(int index) const
Definition: lithium-arm.cc:520
LOsrEntry()
Definition: lithium-arm.cc:44
virtual bool IsControl() const
Definition: lithium-arm.h:237
LPointerMap * pointer_map() const
Definition: lithium-arm.h:244
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:252
int first_instruction_index() const
Definition: hydrogen.h:86
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:312
LLabel * GetLabel(int block_id) const
Definition: lithium-arm.h:2249
DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch, "string-compare-and-branch")
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:247
virtual bool IsGap() const
Definition: lithium-arm.h:235
const Register r1
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
Definition: lithium-arm.cc:473
HGraph * graph() const
Definition: lithium-arm.h:2241
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:145
int block_id() const
Definition: lithium-arm.h:369
void PrintDataTo(StringStream *stream) const
Definition: lithium.cc:137
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:170
CompilationInfo * info() const
Definition: lithium-arm.h:2240
static int ToAllocationIndex(DwVfpRegister reg)
static const int kNumAllocatableRegisters
Definition: assembler-arm.h:74
Token::Value op() const
Definition: lithium-arm.h:1140
V8 flag help text covering harmony semantics (scoping, proxies, typeof), smi/double array tracking, crankshaft and hydrogen options (range analysis, global value numbering, function inlining, loop-invariant code motion, bounds-check elimination, on-stack replacement), tracing and self-optimization counters, and instruction-set options (SSE3, CMOV, SAHF, VFP3, ARMv7, MIPS FPU)
Definition: flags.cc:274
void AddGapMove(int index, LOperand *from, LOperand *to)
Definition: lithium-arm.cc:531
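AddGapMove(index, from, to) records a pending operand move at the gap associated with an instruction index, and AddMove(from, to, zone) appends a single move to such a gap. The toy sketch below only illustrates that bookkeeping with simplified stand-in types (Operand, GapSketch and ChunkSketch are hypothetical); it is not V8's data layout.

#include <cstdio>
#include <utility>
#include <vector>

// Printable stand-in for an operand (register, stack slot, ...).
struct Operand { const char* name; };

// A gap holds the moves to perform between two adjacent instructions.
struct GapSketch {
  std::vector<std::pair<Operand, Operand>> moves;
  void AddMove(Operand from, Operand to) { moves.push_back({from, to}); }
};

// Chunk-like container with one gap per instruction index.
struct ChunkSketch {
  std::vector<GapSketch> gaps;
  explicit ChunkSketch(size_t instruction_count) : gaps(instruction_count) {}
  void AddGapMove(size_t index, Operand from, Operand to) {
    gaps[index].AddMove(from, to);
  }
};

int main() {
  ChunkSketch chunk(4);
  chunk.AddGapMove(2, Operand{"r0"}, Operand{"[sp+8]"});
  for (const auto& move : chunk.gaps[2].moves)
    std::printf("gap 2: %s -> %s\n", move.first.name, move.second.name);
  return 0;
}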
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:210
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:190
Handle< String > name() const
Definition: lithium-arm.h:1516
const DwVfpRegister d2
LConstantOperand * DefineConstantOperand(HConstant *constant)
Definition: lithium-arm.cc:492
bool HasFixedPolicy() const
Definition: lithium.h:163
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:195
void set_replacement(LLabel *label)
Definition: lithium-arm.h:411
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Definition: lithium-arm.cc:542
bool HasPointerMap() const
Definition: lithium-arm.h:245
int NearestGapPos(int index) const
Definition: lithium-arm.cc:525
const DwVfpRegister d1
const DwVfpRegister d10
bool IsRedundant() const
Definition: lithium-arm.cc:134
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:319
virtual int InputCount()=0
const DivMagicNumbers InvalidDivMagicNumber
Definition: utils.h:101
void set_lithium_position(int pos)
Definition: lithium.h:436
static HValue * cast(HValue *value)
Handle< String > type_literal()
Definition: lithium-arm.h:2088
FlagType type() const
Definition: flags.cc:1358
void PrintTo(StringStream *stream)
Definition: lithium.cc:35
Handle< Object > LookupLiteral(LConstantOperand *operand) const
Definition: lithium-arm.cc:537
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:288
const ZoneList< LInstruction * > * instructions() const
Definition: lithium-arm.h:2242
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:348
const Register r4