V8 3.11.10 (Node.js 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
lithium-mips.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "lithium-allocator-inl.h"
31 #include "mips/lithium-mips.h"
33 
34 namespace v8 {
35 namespace internal {
36 
// Generates the trivial CompileToNative() implementation for a concrete
// lithium instruction: L<type> simply dispatches to the code generator's
// Do<type>() method.
#define DEFINE_COMPILE(type) \
  void L##type::CompileToNative(LCodeGen* generator) { \
    generator->Do##type(this); \
  }
// NOTE(review): the LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
// invocation is not visible in this extract — confirm it precedes the
// #undef in the original source.
#undef DEFINE_COMPILE
43 
45  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
46  register_spills_[i] = NULL;
47  }
48  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
49  double_register_spills_[i] = NULL;
50  }
51 }
52 
53 
54 void LOsrEntry::MarkSpilledRegister(int allocation_index,
55  LOperand* spill_operand) {
56  ASSERT(spill_operand->IsStackSlot());
57  ASSERT(register_spills_[allocation_index] == NULL);
58  register_spills_[allocation_index] = spill_operand;
59 }
60 
61 
#ifdef DEBUG
// Debug-only sanity check run on instructions marked as calls.
void LInstruction::VerifyCall() {
  // Call instructions can use only fixed registers as temporaries and
  // outputs because all registers are blocked by the calling convention.
  // Inputs operands must use a fixed register or use-at-start policy or
  // a non-register policy.
  ASSERT(Output() == NULL ||
         LUnallocated::cast(Output())->HasFixedPolicy() ||
         !LUnallocated::cast(Output())->HasRegisterPolicy());
  // Every input must be fixed or consumed before the call clobbers
  // the register set.
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() ||
           operand->IsUsedAtStart());
  }
  // Temporaries must be fixed or must not demand a register at all.
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* operand = LUnallocated::cast(it.Current());
    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
  }
}
#endif
82 
83 
84 void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
85  LOperand* spill_operand) {
86  ASSERT(spill_operand->IsDoubleStackSlot());
87  ASSERT(double_register_spills_[allocation_index] == NULL);
88  double_register_spills_[allocation_index] = spill_operand;
89 }
90 
91 
// Prints the full textual form of an instruction:
// "<mnemonic> <result> = <inputs> [environment] [pointer map]".
void LInstruction::PrintTo(StringStream* stream) {
  stream->Add("%s ", this->Mnemonic());

  PrintOutputOperandTo(stream);

  PrintDataTo(stream);

  // Deoptimization environment, if attached.
  if (HasEnvironment()) {
    stream->Add(" ");
    environment()->PrintTo(stream);
  }

  // GC pointer map, if attached.
  if (HasPointerMap()) {
    stream->Add(" ");
    pointer_map()->PrintTo(stream);
  }
}
109 
110 
// Default data printer: "= <input0> <input1> ...".
void LInstruction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  for (int i = 0; i < InputCount(); i++) {
    if (i > 0) stream->Add(" ");
    InputAt(i)->PrintTo(stream);
  }
}


// Prints the result operand, if this instruction produces one.
void LInstruction::PrintOutputOperandTo(StringStream* stream) {
  if (HasResult()) result()->PrintTo(stream);
}
123 
124 
125 void LLabel::PrintDataTo(StringStream* stream) {
126  LGap::PrintDataTo(stream);
127  LLabel* rep = replacement();
128  if (rep != NULL) {
129  stream->Add(" Dead block replaced with B%d", rep->block_id());
130  }
131 }
132 
133 
134 bool LGap::IsRedundant() const {
135  for (int i = 0; i < 4; i++) {
136  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
137  return false;
138  }
139  }
140 
141  return true;
142 }
143 
144 
145 void LGap::PrintDataTo(StringStream* stream) {
146  for (int i = 0; i < 4; i++) {
147  stream->Add("(");
148  if (parallel_moves_[i] != NULL) {
149  parallel_moves_[i]->PrintDataTo(stream);
150  }
151  stream->Add(") ");
152  }
153 }
154 
155 
156 const char* LArithmeticD::Mnemonic() const {
157  switch (op()) {
158  case Token::ADD: return "add-d";
159  case Token::SUB: return "sub-d";
160  case Token::MUL: return "mul-d";
161  case Token::DIV: return "div-d";
162  case Token::MOD: return "mod-d";
163  default:
164  UNREACHABLE();
165  return NULL;
166  }
167 }
168 
169 
170 const char* LArithmeticT::Mnemonic() const {
171  switch (op()) {
172  case Token::ADD: return "add-t";
173  case Token::SUB: return "sub-t";
174  case Token::MUL: return "mul-t";
175  case Token::MOD: return "mod-t";
176  case Token::DIV: return "div-t";
177  case Token::BIT_AND: return "bit-and-t";
178  case Token::BIT_OR: return "bit-or-t";
179  case Token::BIT_XOR: return "bit-xor-t";
180  case Token::SHL: return "sll-t";
181  case Token::SAR: return "sra-t";
182  case Token::SHR: return "srl-t";
183  default:
184  UNREACHABLE();
185  return NULL;
186  }
187 }
188 
189 
// Unconditional jump: "B<target>".
void LGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", block_id());
}


// Generic branch: "B<true> | B<false> on <value>".
void LBranch::PrintDataTo(StringStream* stream) {
  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
  InputAt(0)->PrintTo(stream);
}


// "if <left> <op> <right> then B<t> else B<f>".
void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" %s ", Token::String(op()));
  InputAt(1)->PrintTo(stream);
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}


// "if <value> ==/=== null|undefined then B<t> else B<f>".
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if ");
  InputAt(0)->PrintTo(stream);
  stream->Add(kind() == kStrictEquality ? " === " : " == ");
  stream->Add(nil() == kNullValue ? "null" : "undefined");
  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}


// "if is_object(<value>) then B<t> else B<f>".
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_object(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if is_string(<value>) then B<t> else B<f>".
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_string(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if is_smi(<value>) then B<t> else B<f>".
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_smi(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if is_undetectable(<value>) then B<t> else B<f>".
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if is_undetectable(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if string_compare(<left><right>) then B<t> else B<f>".
// Note: the two operands print back to back with no separator.
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if string_compare(");
  InputAt(0)->PrintTo(stream);
  InputAt(1)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if has_instance_type(<value>) then B<t> else B<f>".
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_instance_type(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if has_cached_array_index(<value>) then B<t> else B<f>".
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if has_cached_array_index(");
  InputAt(0)->PrintTo(stream);
  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}


// "if class_of_test(<value>, "<class name>") then B<t> else B<f>".
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("if class_of_test(");
  InputAt(0)->PrintTo(stream);
  stream->Add(", \"%o\") then B%d else B%d",
              *hydrogen()->class_name(),
              true_block_id(),
              false_block_id());
}
277 
278 
279 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
280  stream->Add("if typeof ");
281  InputAt(0)->PrintTo(stream);
282  stream->Add(" == \"%s\" then B%d else B%d",
283  *hydrogen()->type_literal()->ToCString(),
285 }
286 
287 
// "#<arity> / " — constant-function call with its argument count.
void LCallConstantFunction::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}


// "/<op name> <input>" — unary math operation and its operand.
void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
  stream->Add("/%s ", hydrogen()->OpName());
  InputAt(0)->PrintTo(stream);
}


// "<context>[<slot>]" — context slot load.
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
  InputAt(0)->PrintTo(stream);
  stream->Add("[%d]", slot_index());
}


// "<context>[<slot>] <- <value>" — context slot store.
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
  InputAt(0)->PrintTo(stream);
  stream->Add("[%d] <- ", slot_index());
  InputAt(1)->PrintTo(stream);
}


// "= <function> #<arity> / " — function invocation.
void LInvokeFunction::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


// "[a2] #<arity> / " — keyed call; the key lives in register a2.
void LCallKeyed::PrintDataTo(StringStream* stream) {
  stream->Add("[a2] #%d / ", arity());
}


// "<name> #<arity> / " — named call.
void LCallNamed::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


// "<name> #<arity> / " — global call.
void LCallGlobal::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name_string = name()->ToCString();
  stream->Add("%s #%d / ", *name_string, arity());
}


// "#<arity> / " — call to a known global function.
void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
  stream->Add("#%d / ", arity());
}


// "= <constructor> #<arity> / " — constructor call.
void LCallNew::PrintDataTo(StringStream* stream) {
  stream->Add("= ");
  InputAt(0)->PrintTo(stream);
  stream->Add(" #%d / ", arity());
}


// "<arguments> length <length> index <index>" — arguments-object access.
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintTo(stream);

  stream->Add(" length ");
  length()->PrintTo(stream);

  stream->Add(" index ");
  index()->PrintTo(stream);
}
357 
358 
// "<object>.<name> <- <value>" — in-object named field store.
void LStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}


// "<object>.<name> <- <value>" — generic (IC-based) named store.
void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" <- ");
  value()->PrintTo(stream);
}


// "<object>[<key>] <- <value>" — fast-elements keyed store.
void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


// "<elements>[<key>] <- <value>" — fast double-elements keyed store.
void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
  elements()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


// "<object>[<key>] <- <value>" — generic (IC-based) keyed store.
void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add("[");
  key()->PrintTo(stream);
  stream->Add("] <- ");
  value()->PrintTo(stream);
}


// "<object> <old map> -> <new map>" — elements-kind transition.
void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintTo(stream);
  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
}
408 
409 
// Constructs an empty lithium chunk for the given graph. The integer
// arguments to the zone lists are initial capacity hints only.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()) {
}
418 
419 
420 int LChunk::GetNextSpillIndex(bool is_double) {
421  // Skip a slot if for a double-width slot.
422  if (is_double) spill_slot_count_++;
423  return spill_slot_count_++;
424 }
425 
426 
427 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
428  int index = GetNextSpillIndex(is_double);
429  if (is_double) {
430  return LDoubleStackSlot::Create(index, zone());
431  } else {
432  return LStackSlot::Create(index, zone());
433  }
434 }
435 
436 
437 void LChunk::MarkEmptyBlocks() {
438  HPhase phase("L_Mark empty blocks", this);
439  for (int i = 0; i < graph()->blocks()->length(); ++i) {
440  HBasicBlock* block = graph()->blocks()->at(i);
441  int first = block->first_instruction_index();
442  int last = block->last_instruction_index();
443  LInstruction* first_instr = instructions()->at(first);
444  LInstruction* last_instr = instructions()->at(last);
445 
446  LLabel* label = LLabel::cast(first_instr);
447  if (last_instr->IsGoto()) {
448  LGoto* goto_instr = LGoto::cast(last_instr);
449  if (label->IsRedundant() &&
450  !label->is_loop_header()) {
451  bool can_eliminate = true;
452  for (int i = first + 1; i < last && can_eliminate; ++i) {
453  LInstruction* cur = instructions()->at(i);
454  if (cur->IsGap()) {
455  LGap* gap = LGap::cast(cur);
456  if (!gap->IsRedundant()) {
457  can_eliminate = false;
458  }
459  } else {
460  can_eliminate = false;
461  }
462  }
463 
464  if (can_eliminate) {
465  label->set_replacement(GetLabel(goto_instr->block_id()));
466  }
467  }
468  }
469  }
470 }
471 
472 
// Appends an instruction to the chunk together with its companion gap.
// The gap goes before a control instruction (so moves happen prior to the
// branch) and after any other instruction.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  // Register the instruction's pointer map at its lithium position so the
  // GC can find it.
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
490 
491 
// Creates a constant operand referring to the given hydrogen constant.
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


// Translates a parameter index to its (negative) stack slot index.
int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->scope()->num_parameters() - 1;
  ASSERT(result < 0);
  return result;
}

// Byte offset of a parameter relative to the frame pointer in the
// arguments stub. (NOTE(review): the original comment said "ebp", which
// is the ia32 frame register; on MIPS the frame register is fp —
// comment inherited from the ia32 port.)
int LChunk::ParameterAt(int index) {
  ASSERT(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


// Returns the gap at the given instruction index; asserts it is a gap.
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


// True when the instruction at the given index is a gap.
bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


// Returns the index of the nearest gap at or before the given index.
int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


// Adds a from->to move to the START position of the gap at the index.
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


// Resolves a constant operand to the handle of its hydrogen constant.
Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
}


// Resolves a constant operand to its hydrogen representation.
Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}
546 
547 
// Builds the lithium chunk for the whole graph, one basic block at a
// time, in graph order. Returns NULL if building was aborted.
LChunk* LChunkBuilder::Build() {
  ASSERT(is_unused());
  chunk_ = new(zone()) LChunk(info(), graph());
  HPhase phase("L_Building chunk", chunk_);
  status_ = BUILDING;
  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    // Pass the lexically-next block so fall-through can be detected.
    HBasicBlock* next = NULL;
    if (i < blocks->length() - 1) next = blocks->at(i + 1);
    DoBasicBlock(blocks->at(i), next);
    if (is_aborted()) return NULL;
  }
  status_ = DONE;
  return chunk_;
}
563 
564 
// Aborts chunk building, optionally tracing a printf-style reason when
// --trace-bailout is on. The builder is left in the ABORTED state.
void LChunkBuilder::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LChunk building in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
578 
579 
// Wraps a fixed general-purpose register in an unallocated operand.
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg));
}


// Wraps a fixed double register in an unallocated operand.
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg));
}
590 
591 
// Uses the value in the given fixed general-purpose register.
LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
  return Use(value, ToUnallocated(fixed_register));
}


// Uses the value in the given fixed double register.
LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
  return Use(value, ToUnallocated(reg));
}


// Uses the value in some register, live for the whole instruction.
LOperand* LChunkBuilder::UseRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


// Uses the value in some register, needed only at the instruction start
// (the register may be reused for the output).
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
  return Use(value,
             new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
                                      LUnallocated::USED_AT_START));
}


// Uses the value in a register the instruction may clobber.
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}


// Uses the value with no placement constraint.
LOperand* LChunkBuilder::Use(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}


// Uses the value with no placement constraint, needed only at start.
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
                                             LUnallocated::USED_AT_START));
}


// Constant operand if the value is a constant, otherwise a plain use.
LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value);
}


// Constant operand if constant, otherwise an at-start use.
LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseAtStart(value);
}


// Constant operand if constant, otherwise a register use.
LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegister(value);
}


// Constant operand if constant, otherwise an at-start register use.
LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : UseRegisterAtStart(value);
}


// Constant operand if constant, otherwise lets the allocator place the
// value anywhere (register or stack slot).
LOperand* LChunkBuilder::UseAny(HValue* value) {
  return value->IsConstant()
      ? chunk_->DefineConstantOperand(HConstant::cast(value))
      : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
663 
664 
// Core use helper: materializes deferred (emit-at-use) values first, then
// tags the operand with the value's virtual register.
LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
  if (value->EmitAtUses()) {
    // Code for this value was deferred until its use; generate it now.
    HInstruction* instr = HInstruction::cast(value);
    VisitInstruction(instr);
  }
  operand->set_virtual_register(value->id());
  return operand;
}
673 
674 
// Core define helper: ties the result operand to the current hydrogen
// instruction's virtual register.
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
                                    LUnallocated* result) {
  result->set_virtual_register(current_instruction_->id());
  instr->set_result(result);
  return instr;
}


// Result must live in some register chosen by the allocator.
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}


// Result must live in the given fixed stack slot.
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
    LTemplateInstruction<1, I, T>* instr, int index) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}


// Result shares its location with the first input operand.
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
    LTemplateInstruction<1, I, T>* instr) {
  return Define(instr,
                new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}


// Result must live in the given fixed general-purpose register.
template<int I, int T>
LInstruction* LChunkBuilder::DefineFixed(
    LTemplateInstruction<1, I, T>* instr, Register reg) {
  return Define(instr, ToUnallocated(reg));
}


// Result must live in the given fixed double register.
template<int I, int T>
LInstruction* LChunkBuilder::DefineFixedDouble(
    LTemplateInstruction<1, I, T>* instr, DoubleRegister reg) {
  return Define(instr, ToUnallocated(reg));
}
720 
721 
// Attaches a deoptimization environment, built from the current block's
// last hydrogen environment, to the instruction.
LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
  HEnvironment* hydrogen_env = current_block_->last_environment();
  int argument_index_accumulator = 0;
  instr->set_environment(CreateEnvironment(hydrogen_env,
                                           &argument_index_accumulator));
  return instr;
}
729 
730 
// Marks the instruction as a call: verifies its operand policies (debug
// only), gives it a pointer map, records any pending lazy-deopt simulate,
// and attaches an environment when eager deoptimization is possible.
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  if (hinstr->HasObservableSideEffects()) {
    // Remember the simulate following this call so lazy deoptimization
    // can resume after the side effect.
    ASSERT(hinstr->next()->IsSimulate());
    HSimulate* sim = HSimulate::cast(hinstr->next());
    ASSERT(instruction_pending_deoptimization_environment_ == NULL);
    ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
    instruction_pending_deoptimization_environment_ = instr;
    pending_deoptimization_ast_id_ = sim->ast_id();
  }

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}
762 
763 
// Attaches a fresh pointer map, recording the current source position,
// to an instruction that does not yet have one.
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
  ASSERT(!instr->HasPointerMap());
  instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
  return instr;
}
769 
770 
// Allocates a fresh virtual register for a temporary that must live in
// some register; aborts building when virtual registers run out.
LUnallocated* LChunkBuilder::TempRegister() {
  LUnallocated* operand =
      new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
  operand->set_virtual_register(allocator_->GetVirtualRegister());
  if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
  return operand;
}


// Temporary pinned to a specific general-purpose register.
LOperand* LChunkBuilder::FixedTemp(Register reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}


// Temporary pinned to a specific double register.
LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
  LUnallocated* operand = ToUnallocated(reg);
  ASSERT(operand->HasFixedPolicy());
  return operand;
}
792 
793 
// Block entry lowers to a bound label for the block.
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
  return new(zone()) LLabel(instr->block());
}


// Soft deoptimization lowers to an unconditional deopt with environment.
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}


// Hard deoptimization lowers to an unconditional deopt with environment.
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}
807 
808 
// Lowers a shift (SHL/SAR/SHR). Tagged operands go through the generic
// binary-op stub; integer operands produce an LShiftI, with a constant
// shift amount folded in when possible.
LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsTagged()) {
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    // Generic case: call the stub with fixed argument registers.
    LOperand* left = UseFixed(instr->left(), a1);
    LOperand* right = UseFixed(instr->right(), a0);
    LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }

  ASSERT(instr->representation().IsInteger32());
  ASSERT(instr->left()->representation().IsInteger32());
  ASSERT(instr->right()->representation().IsInteger32());
  LOperand* left = UseRegisterAtStart(instr->left());

  HValue* right_value = instr->right();
  LOperand* right = NULL;
  int constant_value = 0;
  if (right_value->IsConstant()) {
    HConstant* constant = HConstant::cast(right_value);
    right = chunk_->DefineConstantOperand(constant);
    // Only the low 5 bits of the shift amount are significant.
    constant_value = constant->Integer32Value() & 0x1f;
  } else {
    right = UseRegisterAtStart(right_value);
  }

  // Shift operations can only deoptimize if we do a logical shift
  // by 0 and the result cannot be truncated to int32.
  bool may_deopt = (op == Token::SHR && constant_value == 0);
  bool does_deopt = false;
  if (may_deopt) {
    for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
      if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
        does_deopt = true;
        break;
      }
    }
  }

  LInstruction* result =
      DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
  return does_deopt ? AssignEnvironment(result) : result;
}
854 
855 
// Lowers a double-precision arithmetic operation (not MOD) to an
// LArithmeticD with both operands in registers.
LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->left()->representation().IsDouble());
  ASSERT(instr->right()->representation().IsDouble());
  ASSERT(op != Token::MOD);
  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
  return DefineAsRegister(result);
}
867 
868 
// Lowers a tagged arithmetic operation to a call of the generic binary-op
// stub: operands in a1/a0, result in v0.
LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                           HArithmeticBinaryOperation* instr) {
  ASSERT(op == Token::ADD ||
         op == Token::DIV ||
         op == Token::MOD ||
         op == Token::MUL ||
         op == Token::SUB);
  HValue* left = instr->left();
  HValue* right = instr->right();
  ASSERT(left->representation().IsTagged());
  ASSERT(right->representation().IsTagged());
  LOperand* left_operand = UseFixed(left, a1);
  LOperand* right_operand = UseFixed(right, a0);
  LArithmeticT* result =
      new(zone()) LArithmeticT(op, left_operand, right_operand);
  return MarkAsCall(DefineFixed(result, v0), instr);
}
886 
887 
// Lowers one basic block: sets up the block's incoming environment (from
// the start environment, a single predecessor, or a phi join), then
// visits each hydrogen instruction in order.
void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
  ASSERT(is_building());
  current_block_ = block;
  next_block_ = next_block;
  if (block->IsStartBlock()) {
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);
    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
          pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      last_environment->SetValueAt(phi->merged_index(), phi);
    }
    // Deleted phis collapse to the undefined constant.
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      last_environment->SetValueAt(block->deleted_phis()->at(i),
                                   graph_->GetConstantUndefined());
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while (current != NULL && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  // Record the lithium index range this block produced, if any.
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  next_block_ = NULL;
  current_block_ = NULL;
}
949 
950 
// Lowers one hydrogen instruction to lithium and appends the result to
// the chunk. Saves/restores current_instruction_ because lowering may
// recurse through Use() for emit-at-use values.
void LChunkBuilder::VisitInstruction(HInstruction* current) {
  HInstruction* old_current = current_instruction_;
  current_instruction_ = current;
  if (current->has_position()) position_ = current->position();
  LInstruction* instr = current->CompileToLithium(this);

  if (instr != NULL) {
    // Stress flags force extra pointer maps / environments for testing.
    if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
      instr = AssignPointerMap(instr);
    }
    if (FLAG_stress_environments && !instr->HasEnvironment()) {
      instr = AssignEnvironment(instr);
    }
    instr->set_hydrogen_value(current);
    chunk_->AddInstruction(instr, current_block_);
  }
  current_instruction_ = old_current;
}
969 
970 
// Recursively translates a hydrogen environment chain (innermost last)
// into lithium environments, assigning operands for each live value.
LEnvironment* LChunkBuilder::CreateEnvironment(
    HEnvironment* hydrogen_env,
    int* argument_index_accumulator) {
  if (hydrogen_env == NULL) return NULL;

  // Build outer frames first so 'outer' links point upward.
  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
  int ast_id = hydrogen_env->ast_id();
  ASSERT(ast_id != AstNode::kNoNumber ||
         hydrogen_env->frame_type() != JS_FUNCTION);
  int value_count = hydrogen_env->length();
  LEnvironment* result = new(zone()) LEnvironment(
      hydrogen_env->closure(),
      hydrogen_env->frame_type(),
      ast_id,
      hydrogen_env->parameter_count(),
      argument_count_,
      value_count,
      outer,
      zone());
  int argument_index = *argument_index_accumulator;
  for (int i = 0; i < value_count; ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    LOperand* op = NULL;
    if (value->IsArgumentsObject()) {
      // The arguments object is materialized lazily; no operand needed.
      op = NULL;
    } else if (value->IsPushArgument()) {
      op = new(zone()) LArgument(argument_index++);
    } else {
      op = UseAny(value);
    }
    result->AddValue(op, value->representation());
  }

  // Only JS frames consume pushed-argument indices.
  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
1013 
1014 
1015 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1016  return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1017 }
1018 
1019 
1020 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1021  HValue* value = instr->value();
1022  if (value->EmitAtUses()) {
1023  HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
1024  ? instr->FirstSuccessor()
1025  : instr->SecondSuccessor();
1026  return new(zone()) LGoto(successor->block_id());
1027  }
1028 
1029  LBranch* result = new(zone()) LBranch(UseRegister(value));
1030  // Tagged values that are not known smis or booleans require a
1031  // deoptimization environment.
1032  Representation rep = value->representation();
1033  HType type = value->type();
1034  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
1035  return AssignEnvironment(result);
1036  }
1037  return result;
1038 }
1039 
1040 
1041 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1042  ASSERT(instr->value()->representation().IsTagged());
1043  LOperand* value = UseRegisterAtStart(instr->value());
1044  LOperand* temp = TempRegister();
1045  return new(zone()) LCmpMapAndBranch(value, temp);
1046 }
1047 
1048 
1049 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1050  return DefineAsRegister(
1051  new(zone()) LArgumentsLength(UseRegister(length->value())));
1052 }
1053 
1054 
1055 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1056  return DefineAsRegister(new(zone()) LArgumentsElements);
1057 }
1058 
1059 
1060 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1061  LInstanceOf* result =
1062  new(zone()) LInstanceOf(UseFixed(instr->left(), a0),
1063  UseFixed(instr->right(), a1));
1064  return MarkAsCall(DefineFixed(result, v0), instr);
1065 }
1066 
1067 
1068 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1069  HInstanceOfKnownGlobal* instr) {
1070  LInstanceOfKnownGlobal* result =
1071  new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), a0),
1072  FixedTemp(t0));
1073  return MarkAsCall(DefineFixed(result, v0), instr);
1074 }
1075 
1076 
1077 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1078  LOperand* receiver = UseRegisterAtStart(instr->receiver());
1079  LOperand* function = UseRegisterAtStart(instr->function());
1080  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1081  return AssignEnvironment(DefineSameAsFirst(result));
1082 }
1083 
1084 
1085 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1086  LOperand* function = UseFixed(instr->function(), a1);
1087  LOperand* receiver = UseFixed(instr->receiver(), a0);
1088  LOperand* length = UseFixed(instr->length(), a2);
1089  LOperand* elements = UseFixed(instr->elements(), a3);
1090  LApplyArguments* result = new(zone()) LApplyArguments(function,
1091  receiver,
1092  length,
1093  elements);
1094  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
1095 }
1096 
1097 
1098 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1099  ++argument_count_;
1100  LOperand* argument = Use(instr->argument());
1101  return new(zone()) LPushArgument(argument);
1102 }
1103 
1104 
1105 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1106  return instr->HasNoUses()
1107  ? NULL
1108  : DefineAsRegister(new(zone()) LThisFunction);
1109 }
1110 
1111 
1112 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1113  return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
1114 }
1115 
1116 
1117 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1118  LOperand* context = UseRegisterAtStart(instr->value());
1119  return DefineAsRegister(new(zone()) LOuterContext(context));
1120 }
1121 
1122 
1123 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1124  return MarkAsCall(new(zone()) LDeclareGlobals, instr);
1125 }
1126 
1127 
1128 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1129  LOperand* context = UseRegisterAtStart(instr->value());
1130  return DefineAsRegister(new(zone()) LGlobalObject(context));
1131 }
1132 
1133 
1134 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1135  LOperand* global_object = UseRegisterAtStart(instr->value());
1136  return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
1137 }
1138 
1139 
1140 LInstruction* LChunkBuilder::DoCallConstantFunction(
1141  HCallConstantFunction* instr) {
1142  argument_count_ -= instr->argument_count();
1143  return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, v0), instr);
1144 }
1145 
1146 
1147 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1148  LOperand* function = UseFixed(instr->function(), a1);
1149  argument_count_ -= instr->argument_count();
1150  LInvokeFunction* result = new(zone()) LInvokeFunction(function);
1151  return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1152 }
1153 
1154 
// Lowers unary math builtins. Transcendentals go out via a C/stub call with
// fixed FP registers; pow(x, 0.5), sqrt, abs, floor and round are inlined.
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  BuiltinFunctionId op = instr->op();
  if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
    // Calls out to a helper; input and result both pinned to f4.
    LOperand* input = UseFixedDouble(instr->value(), f4);
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
    return MarkAsCall(DefineFixedDouble(result, f4), instr);
  } else if (op == kMathPowHalf) {
    // Input cannot be the same as the result.
    // See lithium-codegen-mips.cc::DoMathPowHalf.
    LOperand* input = UseFixedDouble(instr->value(), f8);
    LOperand* temp = FixedTemp(f6);
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
    return DefineFixedDouble(result, f4);
  } else {
    LOperand* input = UseRegisterAtStart(instr->value());
    // Only floor needs a scratch GP register.
    LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
    LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
    switch (op) {
      case kMathAbs:
        // abs on a tagged value may allocate a heap number and may deopt.
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      case kMathFloor:
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      case kMathSqrt:
        return DefineAsRegister(result);
      case kMathRound:
        // Rounding can bail out (e.g. on -0 or out-of-range values).
        return AssignEnvironment(DefineAsRegister(result));
      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
1187 
1188 
1189 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1190  ASSERT(instr->key()->representation().IsTagged());
1191  argument_count_ -= instr->argument_count();
1192  LOperand* key = UseFixed(instr->key(), a2);
1193  return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), v0), instr);
1194 }
1195 
1196 
1197 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1198  argument_count_ -= instr->argument_count();
1199  return MarkAsCall(DefineFixed(new(zone()) LCallNamed, v0), instr);
1200 }
1201 
1202 
1203 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1204  argument_count_ -= instr->argument_count();
1205  return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, v0), instr);
1206 }
1207 
1208 
1209 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1210  argument_count_ -= instr->argument_count();
1211  return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, v0), instr);
1212 }
1213 
1214 
1215 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1216  LOperand* constructor = UseFixed(instr->constructor(), a1);
1217  argument_count_ -= instr->argument_count();
1218  LCallNew* result = new(zone()) LCallNew(constructor);
1219  return MarkAsCall(DefineFixed(result, v0), instr);
1220 }
1221 
1222 
1223 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1224  LOperand* function = UseFixed(instr->function(), a1);
1225  argument_count_ -= instr->argument_count();
1226  return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), v0),
1227  instr);
1228 }
1229 
1230 
1231 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1232  argument_count_ -= instr->argument_count();
1233  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, v0), instr);
1234 }
1235 
1236 
1237 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1238  return DoShift(Token::SHR, instr);
1239 }
1240 
1241 
1242 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1243  return DoShift(Token::SAR, instr);
1244 }
1245 
1246 
1247 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1248  return DoShift(Token::SHL, instr);
1249 }
1250 
1251 
// Lowers bitwise AND/OR/XOR. Integer inputs are inlined; tagged inputs go
// through the generic binary-op stub with operands in fixed registers.
LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    // Put the non-constant operand in a register and let the constant (if
    // any) be encoded as an immediate.
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
    return DefineAsRegister(new(zone()) LBitI(left, right));
  } else {
    ASSERT(instr->representation().IsTagged());
    ASSERT(instr->left()->representation().IsTagged());
    ASSERT(instr->right()->representation().IsTagged());

    // Stub call: left in a1, right in a0, result in v0.
    LOperand* left = UseFixed(instr->left(), a1);
    LOperand* right = UseFixed(instr->right(), a0);
    LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }
}
1271 
1272 
1273 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1274  ASSERT(instr->value()->representation().IsInteger32());
1275  ASSERT(instr->representation().IsInteger32());
1276  if (instr->HasNoUses()) return NULL;
1277  LOperand* value = UseRegisterAtStart(instr->value());
1278  return DefineAsRegister(new(zone()) LBitNotI(value));
1279 }
1280 
1281 
// Lowers division: double division inline, int32 division via a stub call
// (hence the fixed registers and deopt/pointer-map bookkeeping), and
// anything else through the generic arithmetic path.
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else if (instr->representation().IsInteger32()) {
    // TODO(1042) The fixed register allocation
    // is needed because we call TypeRecordingBinaryOpStub from
    // the generated code, which requires registers a0
    // and a1 to be used. We should remove that
    // when we provide a native implementation.
    LOperand* dividend = UseFixed(instr->left(), a0);
    LOperand* divisor = UseFixed(instr->right(), a1);
    // The stub call can both GC (pointer map) and deopt (environment).
    return AssignEnvironment(AssignPointerMap(
             DefineFixed(new(zone()) LDivI(dividend, divisor), v0)));
  } else {
    return DoArithmeticT(Token::DIV, instr);
  }
}
1299 
1300 
// floor(a / b) fusion is not implemented on MIPS in this version.
LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  UNIMPLEMENTED();
  return NULL;
}
1305 
1306 
// Lowers modulo. Int32 mod is inlined (with a fast path for power-of-two
// divisors); tagged mod uses the generic stub; double mod calls the C
// library fmod with fixed FP registers.
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LModI* mod;
    if (instr->HasPowerOf2Divisor()) {
      // Power-of-two divisor: reduces to masking; divisor can stay a
      // constant operand. Division by zero is excluded by the flag check.
      ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
      LOperand* value = UseRegisterAtStart(instr->left());
      mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
    } else {
      // General case needs a GP scratch and two FP temps (f20/f22).
      LOperand* dividend = UseRegister(instr->left());
      LOperand* divisor = UseRegister(instr->right());
      mod = new(zone()) LModI(dividend,
                              divisor,
                              TempRegister(),
                              FixedTemp(f20),
                              FixedTemp(f22));
    }

    // -0 results and division by zero force a deopt.
    if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
        instr->CheckFlag(HValue::kCanBeDivByZero)) {
      return AssignEnvironment(DefineAsRegister(mod));
    } else {
      return DefineAsRegister(mod);
    }
  } else if (instr->representation().IsTagged()) {
    return DoArithmeticT(Token::MOD, instr);
  } else {
    ASSERT(instr->representation().IsDouble());
    // We call a C function for double modulo. It can't trigger a GC.
    // We need to use fixed result register for the call.
    // TODO(fschneider): Allow any register as input registers.
    LOperand* left = UseFixedDouble(instr->left(), f2);
    LOperand* right = UseFixedDouble(instr->right(), f4);
    LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
    return MarkAsCall(DefineFixedDouble(result, f2), instr);
  }
}
1346 
1347 
// Lowers multiplication. Int32 multiply is inlined; a temp register is only
// needed when the -0 check cannot be resolved from a constant operand.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left;
    LOperand* right = UseOrConstant(instr->MostConstantOperand());
    LOperand* temp = NULL;
    if (instr->CheckFlag(HValue::kBailoutOnMinusZero) &&
        (instr->CheckFlag(HValue::kCanOverflow) ||
         !right->IsConstantOperand())) {
      // Runtime -0 detection needs the left operand alive past the start
      // plus a scratch register.
      left = UseRegister(instr->LeastConstantOperand());
      temp = TempRegister();
    } else {
      left = UseRegisterAtStart(instr->LeastConstantOperand());
    }
    LMulI* mul = new(zone()) LMulI(left, right, temp);
    if (instr->CheckFlag(HValue::kCanOverflow) ||
        instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // NOTE(review): the return value is deliberately unused — this
      // appears to attach the deopt environment to mul in place; confirm
      // against AssignEnvironment's definition.
      AssignEnvironment(mul);
    }
    return DefineAsRegister(mul);

  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);

  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}
1377 
1378 
1379 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1380  if (instr->representation().IsInteger32()) {
1381  ASSERT(instr->left()->representation().IsInteger32());
1382  ASSERT(instr->right()->representation().IsInteger32());
1383  LOperand* left = UseRegisterAtStart(instr->left());
1384  LOperand* right = UseOrConstantAtStart(instr->right());
1385  LSubI* sub = new(zone()) LSubI(left, right);
1386  LInstruction* result = DefineAsRegister(sub);
1387  if (instr->CheckFlag(HValue::kCanOverflow)) {
1388  result = AssignEnvironment(result);
1389  }
1390  return result;
1391  } else if (instr->representation().IsDouble()) {
1392  return DoArithmeticD(Token::SUB, instr);
1393  } else {
1394  return DoArithmeticT(Token::SUB, instr);
1395  }
1396 }
1397 
1398 
1399 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1400  if (instr->representation().IsInteger32()) {
1401  ASSERT(instr->left()->representation().IsInteger32());
1402  ASSERT(instr->right()->representation().IsInteger32());
1403  LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1404  LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1405  LAddI* add = new(zone()) LAddI(left, right);
1406  LInstruction* result = DefineAsRegister(add);
1407  if (instr->CheckFlag(HValue::kCanOverflow)) {
1408  result = AssignEnvironment(result);
1409  }
1410  return result;
1411  } else if (instr->representation().IsDouble()) {
1412  return DoArithmeticD(Token::ADD, instr);
1413  } else {
1414  ASSERT(instr->representation().IsTagged());
1415  return DoArithmeticT(Token::ADD, instr);
1416  }
1417 }
1418 
1419 
// Lowers Math.pow via a C call, so inputs and the result live in fixed
// registers; the exponent may be a double (f4) or a tagged/int value (a2).
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), f2);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), f4) :
      UseFixed(instr->right(), a2);
  LPower* result = new(zone()) LPower(left, right);
  // A tagged exponent can fail conversion, so the call may deopt eagerly.
  return MarkAsCall(DefineFixedDouble(result, f0),
                    instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
1435 
1436 
1437 LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
1438  ASSERT(instr->representation().IsDouble());
1439  ASSERT(instr->global_object()->representation().IsTagged());
1440  LOperand* global_object = UseFixed(instr->global_object(), a0);
1441  LRandom* result = new(zone()) LRandom(global_object);
1442  return MarkAsCall(DefineFixedDouble(result, f0), instr);
1443 }
1444 
1445 
1446 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1447  ASSERT(instr->left()->representation().IsTagged());
1448  ASSERT(instr->right()->representation().IsTagged());
1449  LOperand* left = UseFixed(instr->left(), a1);
1450  LOperand* right = UseFixed(instr->right(), a0);
1451  LCmpT* result = new(zone()) LCmpT(left, right);
1452  return MarkAsCall(DefineFixed(result, v0), instr);
1453 }
1454 
1455 
1456 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1457  HCompareIDAndBranch* instr) {
1458  Representation r = instr->GetInputRepresentation();
1459  if (r.IsInteger32()) {
1460  ASSERT(instr->left()->representation().IsInteger32());
1461  ASSERT(instr->right()->representation().IsInteger32());
1462  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1463  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1464  return new(zone()) LCmpIDAndBranch(left, right);
1465  } else {
1466  ASSERT(r.IsDouble());
1467  ASSERT(instr->left()->representation().IsDouble());
1468  ASSERT(instr->right()->representation().IsDouble());
1469  LOperand* left = UseRegisterAtStart(instr->left());
1470  LOperand* right = UseRegisterAtStart(instr->right());
1471  return new(zone()) LCmpIDAndBranch(left, right);
1472  }
1473 }
1474 
1475 
1476 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1477  HCompareObjectEqAndBranch* instr) {
1478  LOperand* left = UseRegisterAtStart(instr->left());
1479  LOperand* right = UseRegisterAtStart(instr->right());
1480  return new(zone()) LCmpObjectEqAndBranch(left, right);
1481 }
1482 
1483 
1484 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1485  HCompareConstantEqAndBranch* instr) {
1486  return new(zone()) LCmpConstantEqAndBranch(
1487  UseRegisterAtStart(instr->value()));
1488 }
1489 
1490 
1491 LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
1492  ASSERT(instr->value()->representation().IsTagged());
1493  return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
1494 }
1495 
1496 
1497 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1498  ASSERT(instr->value()->representation().IsTagged());
1499  LOperand* temp = TempRegister();
1500  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
1501  temp);
1502 }
1503 
1504 
1505 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1506  ASSERT(instr->value()->representation().IsTagged());
1507  LOperand* temp = TempRegister();
1508  return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
1509  temp);
1510 }
1511 
1512 
1513 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1514  ASSERT(instr->value()->representation().IsTagged());
1515  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1516 }
1517 
1518 
1519 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1520  HIsUndetectableAndBranch* instr) {
1521  ASSERT(instr->value()->representation().IsTagged());
1522  return new(zone()) LIsUndetectableAndBranch(
1523  UseRegisterAtStart(instr->value()), TempRegister());
1524 }
1525 
1526 
1527 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1528  HStringCompareAndBranch* instr) {
1529  ASSERT(instr->left()->representation().IsTagged());
1530  ASSERT(instr->right()->representation().IsTagged());
1531  LOperand* left = UseFixed(instr->left(), a1);
1532  LOperand* right = UseFixed(instr->right(), a0);
1533  LStringCompareAndBranch* result =
1534  new(zone()) LStringCompareAndBranch(left, right);
1535  return MarkAsCall(result, instr);
1536 }
1537 
1538 
1539 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1540  HHasInstanceTypeAndBranch* instr) {
1541  ASSERT(instr->value()->representation().IsTagged());
1542  LOperand* value = UseRegisterAtStart(instr->value());
1543  return new(zone()) LHasInstanceTypeAndBranch(value);
1544 }
1545 
1546 
1547 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1548  HGetCachedArrayIndex* instr) {
1549  ASSERT(instr->value()->representation().IsTagged());
1550  LOperand* value = UseRegisterAtStart(instr->value());
1551 
1552  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1553 }
1554 
1555 
1556 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1557  HHasCachedArrayIndexAndBranch* instr) {
1558  ASSERT(instr->value()->representation().IsTagged());
1559  return new(zone()) LHasCachedArrayIndexAndBranch(
1560  UseRegisterAtStart(instr->value()));
1561 }
1562 
1563 
1564 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1565  HClassOfTestAndBranch* instr) {
1566  ASSERT(instr->value()->representation().IsTagged());
1567  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
1568  TempRegister());
1569 }
1570 
1571 
1572 LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
1573  LOperand* array = UseRegisterAtStart(instr->value());
1574  return DefineAsRegister(new(zone()) LJSArrayLength(array));
1575 }
1576 
1577 
1578 LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
1579  HFixedArrayBaseLength* instr) {
1580  LOperand* array = UseRegisterAtStart(instr->value());
1581  return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
1582 }
1583 
1584 
1585 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1586  LOperand* object = UseRegisterAtStart(instr->value());
1587  return DefineAsRegister(new(zone()) LElementsKind(object));
1588 }
1589 
1590 
1591 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1592  LOperand* object = UseRegister(instr->value());
1593  LValueOf* result = new(zone()) LValueOf(object, TempRegister());
1594  return DefineAsRegister(result);
1595 }
1596 
1597 
1598 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1599  LOperand* object = UseFixed(instr->value(), a0);
1600  LDateField* result =
1601  new(zone()) LDateField(object, FixedTemp(a1), instr->index());
1602  return MarkAsCall(DefineFixed(result, v0), instr);
1603 }
1604 
1605 
1606 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1607  LOperand* value = UseRegisterAtStart(instr->index());
1608  LOperand* length = UseRegister(instr->length());
1609  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1610 }
1611 
1612 
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do.
  return NULL;
}
1618 
1619 
1620 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1621  LOperand* value = UseFixed(instr->value(), a0);
1622  return MarkAsCall(new(zone()) LThrow(value), instr);
1623 }
1624 
1625 
// A use marker that keeps a value alive; it generates no code itself.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}
1629 
1630 
LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}
1637 
1638 
// Lowers representation changes (tagged/double/int32 conversions). The
// temp-register choices depend on whether the conversion may truncate, so
// statement order and operand selection here are load-bearing.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      // Tagged -> double: deopts if the input is not a number.
      LOperand* value = UseRegister(instr->value());
      LNumberUntagD* res = new(zone()) LNumberUntagD(value);
      return AssignEnvironment(DefineAsRegister(res));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegisterAtStart(instr->value());
      LInstruction* res = NULL;
      if (instr->value()->type().IsSmi()) {
        // Known smi: just strip the tag; cannot fail.
        res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
      } else {
        // General tagged -> int32: truncating conversions need extra
        // temps (one GP plus the fixed double temp f22).
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
                                                      : NULL;
        LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(f22)
                                                      : NULL;
        res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
                                                       temp1,
                                                       temp2,
                                                       temp3));
        res = AssignEnvironment(res);
      }
      return res;
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      // Double -> tagged: allocates a heap number (hence the pointer map).
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();

      // Make sure that the temp and result_temp registers are
      // different.
      LUnallocated* result_temp = TempRegister();
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
      Define(result, result_temp);
      return AssignPointerMap(result);
    } else {
      ASSERT(to.IsInteger32());
      // Double -> int32: may deopt when the value is not representable.
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
      LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
      return AssignEnvironment(DefineAsRegister(res));
    }
  } else if (from.IsInteger32()) {
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegisterAtStart(val);
      if (val->HasRange() && val->range()->IsInSmiRange()) {
        // Fits in a smi: tagging is just a shift; cannot fail.
        return DefineAsRegister(new(zone()) LSmiTag(value));
      } else {
        // May need a heap number: can allocate (pointer map) and the
        // allocation can fail into deopt (environment).
        LNumberTagI* result = new(zone()) LNumberTagI(value);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      }
    } else {
      ASSERT(to.IsDouble());
      // Int32 -> double always succeeds.
      LOperand* value = Use(instr->value());
      return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
    }
  }
  UNREACHABLE();
  return NULL;
}
1706 
1707 
1708 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1709  LOperand* value = UseRegisterAtStart(instr->value());
1710  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1711 }
1712 
1713 
1714 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1715  LOperand* value = UseRegisterAtStart(instr->value());
1716  LInstruction* result = new(zone()) LCheckInstanceType(value);
1717  return AssignEnvironment(result);
1718 }
1719 
1720 
1721 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1722  LOperand* temp1 = TempRegister();
1723  LOperand* temp2 = TempRegister();
1724  LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
1725  return AssignEnvironment(result);
1726 }
1727 
1728 
1729 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1730  LOperand* value = UseRegisterAtStart(instr->value());
1731  return AssignEnvironment(new(zone()) LCheckSmi(value));
1732 }
1733 
1734 
1735 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1736  LOperand* value = UseRegisterAtStart(instr->value());
1737  return AssignEnvironment(new(zone()) LCheckFunction(value));
1738 }
1739 
1740 
1741 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1742  LOperand* value = UseRegisterAtStart(instr->value());
1743  LInstruction* result = new(zone()) LCheckMaps(value);
1744  return AssignEnvironment(result);
1745 }
1746 
1747 
// Clamps a value into [0, 255] for pixel-array stores; the lowering
// depends on the input representation.
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    // Revisit this decision, here and 8 lines below.
    return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new(zone()) LClampIToUint8(reg));
  } else {
    ASSERT(input_rep.IsTagged());
    // Register allocator doesn't (yet) support allocation of double
    // temps. Reserve f22 explicitly.
    LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
    // A tagged input that is not a number triggers a deopt.
    return AssignEnvironment(DefineAsRegister(result));
  }
}
1765 
1766 
1767 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1768  return new(zone()) LReturn(UseFixed(instr->value(), v0));
1769 }
1770 
1771 
1772 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1773  Representation r = instr->representation();
1774  if (r.IsInteger32()) {
1775  return DefineAsRegister(new(zone()) LConstantI);
1776  } else if (r.IsDouble()) {
1777  return DefineAsRegister(new(zone()) LConstantD);
1778  } else if (r.IsTagged()) {
1779  return DefineAsRegister(new(zone()) LConstantT);
1780  } else {
1781  UNREACHABLE();
1782  return NULL;
1783  }
1784 }
1785 
1786 
1787 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1788  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1789  return instr->RequiresHoleCheck()
1790  ? AssignEnvironment(DefineAsRegister(result))
1791  : DefineAsRegister(result);
1792 }
1793 
1794 
1795 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1796  LOperand* global_object = UseFixed(instr->global_object(), a0);
1797  LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
1798  return MarkAsCall(DefineFixed(result, v0), instr);
1799 }
1800 
1801 
1802 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1803  LOperand* value = UseRegister(instr->value());
1804  // Use a temp to check the value in the cell in the case where we perform
1805  // a hole check.
1806  return instr->RequiresHoleCheck()
1807  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
1808  : new(zone()) LStoreGlobalCell(value, NULL);
1809 }
1810 
1811 
1812 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1813  LOperand* global_object = UseFixed(instr->global_object(), a1);
1814  LOperand* value = UseFixed(instr->value(), a0);
1815  LStoreGlobalGeneric* result =
1816  new(zone()) LStoreGlobalGeneric(global_object, value);
1817  return MarkAsCall(result, instr);
1818 }
1819 
1820 
1821 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1822  LOperand* context = UseRegisterAtStart(instr->value());
1823  LInstruction* result =
1824  DefineAsRegister(new(zone()) LLoadContextSlot(context));
1825  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1826 }
1827 
1828 
1829 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
1830  LOperand* context;
1831  LOperand* value;
1832  if (instr->NeedsWriteBarrier()) {
1833  context = UseTempRegister(instr->context());
1834  value = UseTempRegister(instr->value());
1835  } else {
1836  context = UseRegister(instr->context());
1837  value = UseRegister(instr->value());
1838  }
1839  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
1840  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1841 }
1842 
1843 
1844 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1845  return DefineAsRegister(
1846  new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
1847 }
1848 
1849 
1850 LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
1851  HLoadNamedFieldPolymorphic* instr) {
1852  ASSERT(instr->representation().IsTagged());
1853  if (instr->need_generic()) {
1854  LOperand* obj = UseFixed(instr->object(), a0);
1855  LLoadNamedFieldPolymorphic* result =
1856  new(zone()) LLoadNamedFieldPolymorphic(obj);
1857  return MarkAsCall(DefineFixed(result, v0), instr);
1858  } else {
1859  LOperand* obj = UseRegisterAtStart(instr->object());
1860  LLoadNamedFieldPolymorphic* result =
1861  new(zone()) LLoadNamedFieldPolymorphic(obj);
1862  return AssignEnvironment(DefineAsRegister(result));
1863  }
1864 }
1865 
1866 
1867 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1868  LOperand* object = UseFixed(instr->object(), a0);
1869  LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), v0);
1870  return MarkAsCall(result, instr);
1871 }
1872 
1873 
1874 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1875  HLoadFunctionPrototype* instr) {
1876  return AssignEnvironment(DefineAsRegister(
1877  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
1878 }
1879 
1880 
1881 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1882  LOperand* input = UseRegisterAtStart(instr->value());
1883  return DefineAsRegister(new(zone()) LLoadElements(input));
1884 }
1885 
1886 
1887 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1888  HLoadExternalArrayPointer* instr) {
1889  LOperand* input = UseRegisterAtStart(instr->value());
1890  return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
1891 }
1892 
1893 
// Keyed load from a fast-elements backing store. Object and key both live
// in registers at the start of the instruction.
LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
    HLoadKeyedFastElement* instr) {
  ASSERT(instr->representation().IsTagged());
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* obj = UseRegisterAtStart(instr->object());
  LOperand* key = UseRegisterAtStart(instr->key());
  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
  // AssignEnvironment mutates |result| in place; its return value is
  // intentionally discarded here. The environment allows a deopt when the
  // loaded slot contains the hole.
  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
  return DefineAsRegister(result);
}
1904 
1905 
1906 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1907  HLoadKeyedFastDoubleElement* instr) {
1908  ASSERT(instr->representation().IsDouble());
1909  ASSERT(instr->key()->representation().IsInteger32());
1910  LOperand* elements = UseTempRegister(instr->elements());
1911  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1912  LLoadKeyedFastDoubleElement* result =
1913  new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1914  return AssignEnvironment(DefineAsRegister(result));
1915 }
1916 
1917 
// Keyed load from an external (typed) array. The result representation must
// match the elements kind: integer for the integer kinds, double for
// float/double kinds.
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
    HLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Representation representation(instr->representation());
  ASSERT(
      (representation.IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (representation.IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LLoadKeyedSpecializedArrayElement* result =
      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
  LInstruction* load_instr = DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure it
  // has an environment.
  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
      AssignEnvironment(load_instr) : load_instr;
}
1940 
1941 
1942 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1943  LOperand* object = UseFixed(instr->object(), a1);
1944  LOperand* key = UseFixed(instr->key(), a0);
1945 
1946  LInstruction* result =
1947  DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), v0);
1948  return MarkAsCall(result, instr);
1949 }
1950 
1951 
1952 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
1953  HStoreKeyedFastElement* instr) {
1954  bool needs_write_barrier = instr->NeedsWriteBarrier();
1955  ASSERT(instr->value()->representation().IsTagged());
1956  ASSERT(instr->object()->representation().IsTagged());
1957  ASSERT(instr->key()->representation().IsInteger32());
1958 
1959  LOperand* obj = UseTempRegister(instr->object());
1960  LOperand* val = needs_write_barrier
1961  ? UseTempRegister(instr->value())
1962  : UseRegisterAtStart(instr->value());
1963  LOperand* key = needs_write_barrier
1964  ? UseTempRegister(instr->key())
1965  : UseRegisterOrConstantAtStart(instr->key());
1966  return new(zone()) LStoreKeyedFastElement(obj, key, val);
1967 }
1968 
1969 
1970 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
1971  HStoreKeyedFastDoubleElement* instr) {
1972  ASSERT(instr->value()->representation().IsDouble());
1973  ASSERT(instr->elements()->representation().IsTagged());
1974  ASSERT(instr->key()->representation().IsInteger32());
1975 
1976  LOperand* elements = UseRegisterAtStart(instr->elements());
1977  LOperand* val = UseTempRegister(instr->value());
1978  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1979 
1980  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
1981 }
1982 
1983 
// Keyed store into an external (typed) array. The value representation must
// match the elements kind, mirroring the keyed specialized load above.
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
    HStoreKeyedSpecializedArrayElement* instr) {
  Representation representation(instr->value()->representation());
  ElementsKind elements_kind = instr->elements_kind();
  ASSERT(
      (representation.IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (representation.IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->external_pointer()->representation().IsExternal());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* external_pointer = UseRegister(instr->external_pointer());
  // Pixel and float stores convert the value in place (clamp/narrow), so
  // the value register must be a clobberable temp for those kinds.
  bool val_is_temp_register =
      elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
      elements_kind == EXTERNAL_FLOAT_ELEMENTS;
  LOperand* val = val_is_temp_register
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstant(instr->key());

  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
                                                        key,
                                                        val);
}
2011 
2012 
2013 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2014  LOperand* obj = UseFixed(instr->object(), a2);
2015  LOperand* key = UseFixed(instr->key(), a1);
2016  LOperand* val = UseFixed(instr->value(), a0);
2017 
2018  ASSERT(instr->object()->representation().IsTagged());
2019  ASSERT(instr->key()->representation().IsTagged());
2020  ASSERT(instr->value()->representation().IsTagged());
2021 
2022  return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
2023 }
2024 
2025 
// Elements-kind transition. A "simple" map change only rewrites the map
// pointer, so it stays inline; anything else goes through a runtime call
// with fixed registers.
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  ElementsKind from_kind = instr->original_map()->elements_kind();
  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // Inline path: just needs a scratch register for the new map; the
    // result is the (unchanged) object itself.
    LOperand* object = UseRegister(instr->object());
    LOperand* new_map_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
    return DefineSameAsFirst(result);
  } else {
    // Call path: object pinned to a0, fixed temps in a2/a3, result in v0.
    LOperand* object = UseFixed(instr->object(), a0);
    LOperand* fixed_object_reg = FixedTemp(a2);
    LOperand* new_map_reg = FixedTemp(a3);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object,
                                            new_map_reg,
                                            fixed_object_reg);
    return MarkAsCall(DefineFixed(result, v0), instr);
  }
}
2047 
2048 
// Named field store. Operand register policy depends on which write
// barriers fire: one for the stored value, one for a map transition.
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  // A map write barrier is only relevant when this store also installs a
  // transition map.
  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
      instr->NeedsWriteBarrierForMap();

  LOperand* obj;
  if (needs_write_barrier) {
    // In-object stores keep the object in a plain register; stores to the
    // properties backing store need it as a clobberable temp.
    obj = instr->is_in_object()
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  // The value must outlive the store when the barrier has to inspect it.
  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());

  // We need a temporary register for write barrier of the map field.
  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;

  return new(zone()) LStoreNamedField(obj, val, temp);
}
2074 
2075 
2076 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2077  LOperand* obj = UseFixed(instr->object(), a1);
2078  LOperand* val = UseFixed(instr->value(), a0);
2079 
2080  LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
2081  return MarkAsCall(result, instr);
2082 }
2083 
2084 
2085 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2086  LOperand* left = UseRegisterAtStart(instr->left());
2087  LOperand* right = UseRegisterAtStart(instr->right());
2088  return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), v0),
2089  instr);
2090 }
2091 
2092 
2093 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2094  LOperand* string = UseTempRegister(instr->string());
2095  LOperand* index = UseTempRegister(instr->index());
2096  LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
2097  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2098 }
2099 
2100 
2101 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2102  LOperand* char_code = UseRegister(instr->value());
2103  LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
2104  return AssignPointerMap(DefineAsRegister(result));
2105 }
2106 
2107 
2108 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2109  LOperand* string = UseRegisterAtStart(instr->value());
2110  return DefineAsRegister(new(zone()) LStringLength(string));
2111 }
2112 
2113 
2114 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2115  LAllocateObject* result =
2116  new(zone()) LAllocateObject(TempRegister(), TempRegister());
2117  return AssignPointerMap(DefineAsRegister(result));
2118 }
2119 
2120 
2121 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2122  return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, v0), instr);
2123 }
2124 
2125 
2126 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2127  return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, v0), instr);
2128 }
2129 
2130 
2131 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2132  return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, v0), instr);
2133 }
2134 
2135 
2136 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2137  return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, v0), instr);
2138 }
2139 
2140 
2141 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2142  return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, v0), instr);
2143 }
2144 
2145 
2146 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2147  LOperand* object = UseFixed(instr->object(), a0);
2148  LOperand* key = UseFixed(instr->key(), a1);
2149  LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
2150  return MarkAsCall(DefineFixed(result, v0), instr);
2151 }
2152 
2153 
// On-stack-replacement entry point. Flags the allocator and stamps the
// current environment with this entry's AST id before emitting the entry.
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
2159 
2160 
2161 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2162  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2163  return DefineAsSpilled(new(zone()) LParameter, spill_index);
2164 }
2165 
2166 
2167 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2168  int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
2169  if (spill_index > LUnallocated::kMaxFixedIndex) {
2170  Abort("Too many spill slots needed for OSR");
2171  spill_index = 0;
2172  }
2173  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2174 }
2175 
2176 
2177 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2178  argument_count_ -= instr->argument_count();
2179  return MarkAsCall(DefineFixed(new(zone()) LCallStub, v0), instr);
2180 }
2181 
2182 
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used and no lithium instruction is emitted.
  return NULL;
}
2190 
2191 
2192 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2193  LOperand* arguments = UseRegister(instr->arguments());
2194  LOperand* length = UseTempRegister(instr->length());
2195  LOperand* index = UseRegister(instr->index());
2196  LAccessArgumentsAt* result =
2197  new(zone()) LAccessArgumentsAt(arguments, length, index);
2198  return AssignEnvironment(DefineAsRegister(result));
2199 }
2200 
2201 
2202 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2203  LOperand* object = UseFixed(instr->value(), a0);
2204  LToFastProperties* result = new(zone()) LToFastProperties(object);
2205  return MarkAsCall(DefineFixed(result, v0), instr);
2206 }
2207 
2208 
2209 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2210  LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), a0));
2211  return MarkAsCall(DefineFixed(result, v0), instr);
2212 }
2213 
2214 
2215 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2216  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2217 }
2218 
2219 
2220 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2221  HIsConstructCallAndBranch* instr) {
2222  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2223 }
2224 
2225 
// Apply an HSimulate to the builder's current environment: set the AST id,
// pop/push/bind the simulated values, and — if a previous instruction is
// waiting for a lazy deopt environment — emit an LLazyBailout to capture it.
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  HEnvironment* env = current_block_->last_environment();
  ASSERT(env != NULL);

  env->set_ast_id(instr->ast_id());

  // First drop popped values, then replay the simulate's value list:
  // values with an assigned index are bound in place, the rest are pushed.
  env->Drop(instr->pop_count());
  for (int i = 0; i < instr->values()->length(); ++i) {
    HValue* value = instr->values()->at(i);
    if (instr->HasAssignedIndexAt(i)) {
      env->Bind(instr->GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }

  // If there is an instruction pending deoptimization environment create a
  // lazy bailout instruction to capture the environment.
  if (pending_deoptimization_ast_id_ == instr->ast_id()) {
    LInstruction* result = new(zone()) LLazyBailout;
    result = AssignEnvironment(result);
    // Store the lazy deopt environment with the instruction if needed. Right
    // now it is only used for LInstanceOfKnownGlobal.
    instruction_pending_deoptimization_environment_->
        SetDeferredLazyDeoptimizationEnvironment(result->environment());
    // Clear the pending state so the environment is captured only once.
    instruction_pending_deoptimization_environment_ = NULL;
    pending_deoptimization_ast_id_ = AstNode::kNoNumber;
    return result;
  }

  // No pending lazy deopt: the simulate produces no lithium instruction.
  return NULL;
}
2258 
2259 
2260 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2261  if (instr->is_function_entry()) {
2262  return MarkAsCall(new(zone()) LStackCheck, instr);
2263  } else {
2264  ASSERT(instr->is_backwards_branch());
2265  return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
2266  }
2267 }
2268 
2269 
// Enter an inlined function: build the inner environment by copying the
// outer one, install it on the current block, and record the inlined
// closure on the chunk. Emits no lithium instruction.
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->call_kind(),
                                               instr->is_construct());
  // If the inlinee materializes an arguments object, bind its variable to
  // the graph's canonical arguments value.
  if (instr->arguments_var() != NULL) {
    inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
  }
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
2286 
2287 
// Leave an inlined function: optionally drop the inlinee's pushed
// arguments, then restore the outer environment on the current block.
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (instr->arguments_pushed()) {
    // Arguments were materialized on the stack; emit an LDrop to discard
    // them and keep the modeled argument count in sync.
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    argument_count_ -= argument_count;
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  // Either NULL or the LDrop created above.
  return pop;
}
2305 
2306 
2307 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2308  LOperand* key = UseRegisterAtStart(instr->key());
2309  LOperand* object = UseRegisterAtStart(instr->object());
2310  LIn* result = new(zone()) LIn(key, object);
2311  return MarkAsCall(DefineFixed(result, v0), instr);
2312 }
2313 
2314 
2315 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2316  LOperand* object = UseFixed(instr->enumerable(), a0);
2317  LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
2318  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
2319 }
2320 
2321 
2322 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2323  LOperand* map = UseRegister(instr->map());
2324  return AssignEnvironment(DefineAsRegister(
2325  new(zone()) LForInCacheArray(map)));
2326 }
2327 
2328 
2329 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2330  LOperand* value = UseRegisterAtStart(instr->value());
2331  LOperand* map = UseRegisterAtStart(instr->map());
2332  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2333 }
2334 
2335 
2336 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2337  LOperand* object = UseRegister(instr->object());
2338  LOperand* index = UseRegister(instr->index());
2339  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
2340 }
2341 
2342 
2343 } } // namespace v8::internal
const FPURegister f4
const FPURegister f20
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:196
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:305
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:219
Handle< Object > name() const
Definition: lithium-arm.h:1726
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:120
Token::Value op() const
Definition: lithium-arm.h:1117
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:84
const FPURegister f0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:279
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const FPURegister f22
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:226
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 instructions(ARM only)") DEFINE_bool(enable_armv7
LLabel(HBasicBlock *block)
Definition: lithium-arm.h:400
Handle< String > name() const
Definition: lithium-arm.h:1542
static const int kNumAllocatableRegisters
Handle< Object > name() const
Definition: lithium-arm.h:1705
LEnvironment * environment() const
Definition: lithium-arm.h:240
Token::Value op() const
Definition: lithium-arm.h:610
#define ASSERT(condition)
Definition: checks.h:270
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:111
void PrintTo(StringStream *stream)
Definition: lithium.cc:203
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:49
LChunk(CompilationInfo *info, HGraph *graph)
Definition: lithium-arm.cc:410
EqualityKind kind() const
Definition: lithium-arm.h:668
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:368
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:293
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:299
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:50
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:324
DwVfpRegister DoubleRegister
void PrintTo(StringStream *stream)
Definition: lithium.cc:158
#define DEFINE_COMPILE(type)
Definition: lithium-mips.cc:37
LLabel * replacement() const
Definition: lithium-arm.h:410
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:156
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:201
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:330
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:54
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
static const char * String(Value tok)
Definition: token.h:275
const int kPointerSize
Definition: globals.h:234
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:336
bool HasEnvironment() const
Definition: lithium-arm.h:241
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:341
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:233
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:395
const FPURegister f2
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:92
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:125
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:359
LOsrEntry()
Definition: lithium-arm.cc:44
LPointerMap * pointer_map() const
Definition: lithium-arm.h:244
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:312
virtual DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,"string-compare-and-branch") Token void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:247
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:145
int block_id() const
Definition: lithium-arm.h:369
void PrintDataTo(StringStream *stream) const
Definition: lithium.cc:137
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:170
#define UNIMPLEMENTED()
Definition: checks.h:48
static const int kNumAllocatableRegisters
Definition: assembler-arm.h:74
Token::Value op() const
Definition: lithium-arm.h:1140
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:210
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:190
Handle< String > name() const
Definition: lithium-arm.h:1516
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:195
const FPURegister f6
bool HasPointerMap() const
Definition: lithium-arm.h:245
bool IsRedundant() const
Definition: lithium-arm.cc:134
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:319
virtual int InputCount()=0
Handle< String > type_literal()
Definition: lithium-arm.h:2088
FlagType type() const
Definition: flags.cc:1358
void PrintTo(StringStream *stream)
Definition: lithium.cc:35
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:288
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:348
const FPURegister f8