V8 3.11.10 (as bundled with Node.js 0.8.26)
V8 is Google's open source JavaScript engine.
All Data Structures | Namespaces | Files | Functions | Variables | Typedefs | Enumerations | Enumerators | Friends | Macros | Pages
lithium-x64.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "lithium-allocator-inl.h"
33 #include "x64/lithium-x64.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define DEFINE_COMPILE(type) \
40  void L##type::CompileToNative(LCodeGen* generator) { \
41  generator->Do##type(this); \
42  }
44 #undef DEFINE_COMPILE
45 
47  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
48  register_spills_[i] = NULL;
49  }
50  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
51  double_register_spills_[i] = NULL;
52  }
53 }
54 
55 
56 void LOsrEntry::MarkSpilledRegister(int allocation_index,
57  LOperand* spill_operand) {
58  ASSERT(spill_operand->IsStackSlot());
59  ASSERT(register_spills_[allocation_index] == NULL);
60  register_spills_[allocation_index] = spill_operand;
61 }
62 
63 
64 void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
65  LOperand* spill_operand) {
66  ASSERT(spill_operand->IsDoubleStackSlot());
67  ASSERT(double_register_spills_[allocation_index] == NULL);
68  double_register_spills_[allocation_index] = spill_operand;
69 }
70 
71 
#ifdef DEBUG
// Debug-only sanity check for instructions marked as calls.
// Call instructions can use only fixed registers as temporaries and
// outputs because all registers are blocked by the calling convention.
// Input operands must use a fixed register, a use-at-start policy, or
// a non-register policy.
void LInstruction::VerifyCall() {
  LOperand* out = Output();
  if (out != NULL) {
    LUnallocated* result = LUnallocated::cast(out);
    ASSERT(result->HasFixedPolicy() || !result->HasRegisterPolicy());
  }
  for (UseIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* use = LUnallocated::cast(it.Current());
    ASSERT(use->HasFixedPolicy() || use->IsUsedAtStart());
  }
  for (TempIterator it(this); !it.Done(); it.Advance()) {
    LUnallocated* temp = LUnallocated::cast(it.Current());
    ASSERT(temp->HasFixedPolicy() || !temp->HasRegisterPolicy());
  }
}
#endif
92 
93 
94 void LInstruction::PrintTo(StringStream* stream) {
95  stream->Add("%s ", this->Mnemonic());
96 
97  PrintOutputOperandTo(stream);
98 
99  PrintDataTo(stream);
100 
101  if (HasEnvironment()) {
102  stream->Add(" ");
103  environment()->PrintTo(stream);
104  }
105 
106  if (HasPointerMap()) {
107  stream->Add(" ");
108  pointer_map()->PrintTo(stream);
109  }
110 }
111 
112 
113 void LInstruction::PrintDataTo(StringStream* stream) {
114  stream->Add("= ");
115  for (int i = 0; i < InputCount(); i++) {
116  if (i > 0) stream->Add(" ");
117  InputAt(i)->PrintTo(stream);
118  }
119 }
120 
121 
122 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
123  if (HasResult()) result()->PrintTo(stream);
124 }
125 
126 
127 void LLabel::PrintDataTo(StringStream* stream) {
128  LGap::PrintDataTo(stream);
129  LLabel* rep = replacement();
130  if (rep != NULL) {
131  stream->Add(" Dead block replaced with B%d", rep->block_id());
132  }
133 }
134 
135 
136 bool LGap::IsRedundant() const {
137  for (int i = 0; i < 4; i++) {
138  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
139  return false;
140  }
141  }
142 
143  return true;
144 }
145 
146 
147 void LGap::PrintDataTo(StringStream* stream) {
148  for (int i = 0; i < 4; i++) {
149  stream->Add("(");
150  if (parallel_moves_[i] != NULL) {
151  parallel_moves_[i]->PrintDataTo(stream);
152  }
153  stream->Add(") ");
154  }
155 }
156 
157 
158 const char* LArithmeticD::Mnemonic() const {
159  switch (op()) {
160  case Token::ADD: return "add-d";
161  case Token::SUB: return "sub-d";
162  case Token::MUL: return "mul-d";
163  case Token::DIV: return "div-d";
164  case Token::MOD: return "mod-d";
165  default:
166  UNREACHABLE();
167  return NULL;
168  }
169 }
170 
171 
172 const char* LArithmeticT::Mnemonic() const {
173  switch (op()) {
174  case Token::ADD: return "add-t";
175  case Token::SUB: return "sub-t";
176  case Token::MUL: return "mul-t";
177  case Token::MOD: return "mod-t";
178  case Token::DIV: return "div-t";
179  case Token::BIT_AND: return "bit-and-t";
180  case Token::BIT_OR: return "bit-or-t";
181  case Token::BIT_XOR: return "bit-xor-t";
182  case Token::SHL: return "sal-t";
183  case Token::SAR: return "sar-t";
184  case Token::SHR: return "shr-t";
185  default:
186  UNREACHABLE();
187  return NULL;
188  }
189 }
190 
191 
192 void LGoto::PrintDataTo(StringStream* stream) {
193  stream->Add("B%d", block_id());
194 }
195 
196 
197 void LBranch::PrintDataTo(StringStream* stream) {
198  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
199  InputAt(0)->PrintTo(stream);
200 }
201 
202 
203 void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
204  stream->Add("if ");
205  InputAt(0)->PrintTo(stream);
206  stream->Add(" %s ", Token::String(op()));
207  InputAt(1)->PrintTo(stream);
208  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
209 }
210 
211 
212 void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
213  stream->Add("if ");
214  InputAt(0)->PrintTo(stream);
215  stream->Add(kind() == kStrictEquality ? " === " : " == ");
216  stream->Add(nil() == kNullValue ? "null" : "undefined");
217  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
218 }
219 
220 
221 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
222  stream->Add("if is_object(");
223  InputAt(0)->PrintTo(stream);
224  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
225 }
226 
227 
228 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
229  stream->Add("if is_string(");
230  InputAt(0)->PrintTo(stream);
231  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
232 }
233 
234 
235 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
236  stream->Add("if is_smi(");
237  InputAt(0)->PrintTo(stream);
238  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
239 }
240 
241 
242 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
243  stream->Add("if is_undetectable(");
244  InputAt(0)->PrintTo(stream);
245  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
246 }
247 
248 
249 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
250  stream->Add("if string_compare(");
251  InputAt(0)->PrintTo(stream);
252  InputAt(1)->PrintTo(stream);
253  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
254 }
255 
256 
257 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
258  stream->Add("if has_instance_type(");
259  InputAt(0)->PrintTo(stream);
260  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
261 }
262 
263 
264 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
265  stream->Add("if has_cached_array_index(");
266  InputAt(0)->PrintTo(stream);
267  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
268 }
269 
270 
271 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
272  stream->Add("if class_of_test(");
273  InputAt(0)->PrintTo(stream);
274  stream->Add(", \"%o\") then B%d else B%d",
275  *hydrogen()->class_name(),
276  true_block_id(),
277  false_block_id());
278 }
279 
280 
281 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
282  stream->Add("if typeof ");
283  InputAt(0)->PrintTo(stream);
284  stream->Add(" == \"%s\" then B%d else B%d",
285  *hydrogen()->type_literal()->ToCString(),
287 }
288 
289 
290 void LCallConstantFunction::PrintDataTo(StringStream* stream) {
291  stream->Add("#%d / ", arity());
292 }
293 
294 
295 void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
296  stream->Add("/%s ", hydrogen()->OpName());
297  InputAt(0)->PrintTo(stream);
298 }
299 
300 
301 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
302  InputAt(0)->PrintTo(stream);
303  stream->Add("[%d]", slot_index());
304 }
305 
306 
307 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
308  InputAt(0)->PrintTo(stream);
309  stream->Add("[%d] <- ", slot_index());
310  InputAt(1)->PrintTo(stream);
311 }
312 
313 
314 void LInvokeFunction::PrintDataTo(StringStream* stream) {
315  stream->Add("= ");
316  InputAt(0)->PrintTo(stream);
317  stream->Add(" #%d / ", arity());
318 }
319 
320 
321 void LCallKeyed::PrintDataTo(StringStream* stream) {
322  stream->Add("[rcx] #%d / ", arity());
323 }
324 
325 
326 void LCallNamed::PrintDataTo(StringStream* stream) {
327  SmartArrayPointer<char> name_string = name()->ToCString();
328  stream->Add("%s #%d / ", *name_string, arity());
329 }
330 
331 
332 void LCallGlobal::PrintDataTo(StringStream* stream) {
333  SmartArrayPointer<char> name_string = name()->ToCString();
334  stream->Add("%s #%d / ", *name_string, arity());
335 }
336 
337 
338 void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
339  stream->Add("#%d / ", arity());
340 }
341 
342 
343 void LCallNew::PrintDataTo(StringStream* stream) {
344  stream->Add("= ");
345  InputAt(0)->PrintTo(stream);
346  stream->Add(" #%d / ", arity());
347 }
348 
349 
350 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
351  arguments()->PrintTo(stream);
352 
353  stream->Add(" length ");
354  length()->PrintTo(stream);
355 
356  stream->Add(" index ");
357  index()->PrintTo(stream);
358 }
359 
360 
361 int LChunk::GetNextSpillIndex(bool is_double) {
362  return spill_slot_count_++;
363 }
364 
365 
366 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
367  // All stack slots are Double stack slots on x64.
368  // Alternatively, at some point, start using half-size
369  // stack slots for int32 values.
370  int index = GetNextSpillIndex(is_double);
371  if (is_double) {
372  return LDoubleStackSlot::Create(index, zone());
373  } else {
374  return LStackSlot::Create(index, zone());
375  }
376 }
377 
378 
380  HPhase phase("L_Mark empty blocks", this);
381  for (int i = 0; i < graph()->blocks()->length(); ++i) {
382  HBasicBlock* block = graph()->blocks()->at(i);
383  int first = block->first_instruction_index();
384  int last = block->last_instruction_index();
385  LInstruction* first_instr = instructions()->at(first);
386  LInstruction* last_instr = instructions()->at(last);
387 
388  LLabel* label = LLabel::cast(first_instr);
389  if (last_instr->IsGoto()) {
390  LGoto* goto_instr = LGoto::cast(last_instr);
391  if (label->IsRedundant() &&
392  !label->is_loop_header()) {
393  bool can_eliminate = true;
394  for (int i = first + 1; i < last && can_eliminate; ++i) {
395  LInstruction* cur = instructions()->at(i);
396  if (cur->IsGap()) {
397  LGap* gap = LGap::cast(cur);
398  if (!gap->IsRedundant()) {
399  can_eliminate = false;
400  }
401  } else {
402  can_eliminate = false;
403  }
404  }
405 
406  if (can_eliminate) {
407  label->set_replacement(GetLabel(goto_instr->block_id()));
408  }
409  }
410  }
411  }
412 }
413 
414 
415 void LStoreNamedField::PrintDataTo(StringStream* stream) {
416  object()->PrintTo(stream);
417  stream->Add(".");
418  stream->Add(*String::cast(*name())->ToCString());
419  stream->Add(" <- ");
420  value()->PrintTo(stream);
421 }
422 
423 
424 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
425  object()->PrintTo(stream);
426  stream->Add(".");
427  stream->Add(*String::cast(*name())->ToCString());
428  stream->Add(" <- ");
429  value()->PrintTo(stream);
430 }
431 
432 
433 void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
434  object()->PrintTo(stream);
435  stream->Add("[");
436  key()->PrintTo(stream);
437  stream->Add("] <- ");
438  value()->PrintTo(stream);
439 }
440 
441 
442 void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
443  elements()->PrintTo(stream);
444  stream->Add("[");
445  key()->PrintTo(stream);
446  stream->Add("] <- ");
447  value()->PrintTo(stream);
448 }
449 
450 
451 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
452  object()->PrintTo(stream);
453  stream->Add("[");
454  key()->PrintTo(stream);
455  stream->Add("] <- ");
456  value()->PrintTo(stream);
457 }
458 
459 
460 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
461  object()->PrintTo(stream);
462  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
463 }
464 
465 
466 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
467  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
468  int index = -1;
469  if (instr->IsControl()) {
470  instructions_.Add(gap, zone());
471  index = instructions_.length();
472  instructions_.Add(instr, zone());
473  } else {
474  index = instructions_.length();
475  instructions_.Add(instr, zone());
476  instructions_.Add(gap, zone());
477  }
478  if (instr->HasPointerMap()) {
479  pointer_maps_.Add(instr->pointer_map(), zone());
480  instr->pointer_map()->set_lithium_position(index);
481  }
482 }
483 
484 
485 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
486  return LConstantOperand::Create(constant->id(), zone());
487 }
488 
489 
490 int LChunk::GetParameterStackSlot(int index) const {
491  // The receiver is at index 0, the first parameter at index 1, so we
492  // shift all parameter indexes down by the number of parameters, and
493  // make sure they end up negative so they are distinguishable from
494  // spill slots.
495  int result = index - info()->scope()->num_parameters() - 1;
496  ASSERT(result < 0);
497  return result;
498 }
499 
500 // A parameter relative to ebp in the arguments stub.
501 int LChunk::ParameterAt(int index) {
502  ASSERT(-1 <= index); // -1 is the receiver.
503  return (1 + info()->scope()->num_parameters() - index) *
504  kPointerSize;
505 }
506 
507 
508 LGap* LChunk::GetGapAt(int index) const {
509  return LGap::cast(instructions_[index]);
510 }
511 
512 
513 bool LChunk::IsGapAt(int index) const {
514  return instructions_[index]->IsGap();
515 }
516 
517 
518 int LChunk::NearestGapPos(int index) const {
519  while (!IsGapAt(index)) index--;
520  return index;
521 }
522 
523 
524 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
526  LGap::START, zone())->AddMove(from, to, zone());
527 }
528 
529 
530 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
531  return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
532 }
533 
534 
536  LConstantOperand* operand) const {
537  return graph_->LookupValue(operand->index())->representation();
538 }
539 
540 
541 LChunk* LChunkBuilder::Build() {
542  ASSERT(is_unused());
543  chunk_ = new(zone()) LChunk(info(), graph());
544  HPhase phase("L_Building chunk", chunk_);
545  status_ = BUILDING;
546  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
547  for (int i = 0; i < blocks->length(); i++) {
548  HBasicBlock* next = NULL;
549  if (i < blocks->length() - 1) next = blocks->at(i + 1);
550  DoBasicBlock(blocks->at(i), next);
551  if (is_aborted()) return NULL;
552  }
553  status_ = DONE;
554  return chunk_;
555 }
556 
557 
558 void LChunkBuilder::Abort(const char* format, ...) {
559  if (FLAG_trace_bailout) {
560  SmartArrayPointer<char> name(
561  info()->shared_info()->DebugName()->ToCString());
562  PrintF("Aborting LChunk building in @\"%s\": ", *name);
563  va_list arguments;
564  va_start(arguments, format);
565  OS::VPrint(format, arguments);
566  va_end(arguments);
567  PrintF("\n");
568  }
569  status_ = ABORTED;
570 }
571 
572 
573 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
574  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
576 }
577 
578 
579 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
580  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
582 }
583 
584 
585 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
586  return Use(value, ToUnallocated(fixed_register));
587 }
588 
589 
590 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
591  return Use(value, ToUnallocated(reg));
592 }
593 
594 
595 LOperand* LChunkBuilder::UseRegister(HValue* value) {
596  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
597 }
598 
599 
600 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
601  return Use(value,
602  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
604 }
605 
606 
607 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
608  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
609 }
610 
611 
612 LOperand* LChunkBuilder::Use(HValue* value) {
613  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
614 }
615 
616 
617 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
618  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
620 }
621 
622 
623 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
624  return value->IsConstant()
625  ? chunk_->DefineConstantOperand(HConstant::cast(value))
626  : Use(value);
627 }
628 
629 
630 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
631  return value->IsConstant()
632  ? chunk_->DefineConstantOperand(HConstant::cast(value))
633  : UseAtStart(value);
634 }
635 
636 
637 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
638  return value->IsConstant()
639  ? chunk_->DefineConstantOperand(HConstant::cast(value))
640  : UseRegister(value);
641 }
642 
643 
644 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
645  return value->IsConstant()
646  ? chunk_->DefineConstantOperand(HConstant::cast(value))
647  : UseRegisterAtStart(value);
648 }
649 
650 
651 LOperand* LChunkBuilder::UseAny(HValue* value) {
652  return value->IsConstant()
653  ? chunk_->DefineConstantOperand(HConstant::cast(value))
654  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
655 }
656 
657 
658 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
659  if (value->EmitAtUses()) {
660  HInstruction* instr = HInstruction::cast(value);
661  VisitInstruction(instr);
662  }
663  operand->set_virtual_register(value->id());
664  return operand;
665 }
666 
667 
668 template<int I, int T>
669 LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
670  LUnallocated* result) {
671  result->set_virtual_register(current_instruction_->id());
672  instr->set_result(result);
673  return instr;
674 }
675 
676 
677 template<int I, int T>
678 LInstruction* LChunkBuilder::DefineAsRegister(
679  LTemplateInstruction<1, I, T>* instr) {
680  return Define(instr,
681  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
682 }
683 
684 
685 template<int I, int T>
686 LInstruction* LChunkBuilder::DefineAsSpilled(
687  LTemplateInstruction<1, I, T>* instr,
688  int index) {
689  return Define(instr,
690  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
691 }
692 
693 
694 template<int I, int T>
695 LInstruction* LChunkBuilder::DefineSameAsFirst(
696  LTemplateInstruction<1, I, T>* instr) {
697  return Define(instr,
698  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
699 }
700 
701 
702 template<int I, int T>
703 LInstruction* LChunkBuilder::DefineFixed(LTemplateInstruction<1, I, T>* instr,
704  Register reg) {
705  return Define(instr, ToUnallocated(reg));
706 }
707 
708 
709 template<int I, int T>
710 LInstruction* LChunkBuilder::DefineFixedDouble(
711  LTemplateInstruction<1, I, T>* instr,
712  XMMRegister reg) {
713  return Define(instr, ToUnallocated(reg));
714 }
715 
716 
717 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
718  HEnvironment* hydrogen_env = current_block_->last_environment();
719  int argument_index_accumulator = 0;
720  instr->set_environment(CreateEnvironment(hydrogen_env,
721  &argument_index_accumulator));
722  return instr;
723 }
724 
725 
726 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
727  HInstruction* hinstr,
728  CanDeoptimize can_deoptimize) {
729 #ifdef DEBUG
730  instr->VerifyCall();
731 #endif
732  instr->MarkAsCall();
733  instr = AssignPointerMap(instr);
734 
735  if (hinstr->HasObservableSideEffects()) {
736  ASSERT(hinstr->next()->IsSimulate());
737  HSimulate* sim = HSimulate::cast(hinstr->next());
738  ASSERT(instruction_pending_deoptimization_environment_ == NULL);
739  ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
740  instruction_pending_deoptimization_environment_ = instr;
741  pending_deoptimization_ast_id_ = sim->ast_id();
742  }
743 
744  // If instruction does not have side-effects lazy deoptimization
745  // after the call will try to deoptimize to the point before the call.
746  // Thus we still need to attach environment to this call even if
747  // call sequence can not deoptimize eagerly.
748  bool needs_environment =
749  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
750  !hinstr->HasObservableSideEffects();
751  if (needs_environment && !instr->HasEnvironment()) {
752  instr = AssignEnvironment(instr);
753  }
754 
755  return instr;
756 }
757 
758 
759 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
760  ASSERT(!instr->HasPointerMap());
761  instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
762  return instr;
763 }
764 
765 
766 LUnallocated* LChunkBuilder::TempRegister() {
767  LUnallocated* operand =
768  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
769  operand->set_virtual_register(allocator_->GetVirtualRegister());
770  if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
771  return operand;
772 }
773 
774 
775 LOperand* LChunkBuilder::FixedTemp(Register reg) {
776  LUnallocated* operand = ToUnallocated(reg);
777  ASSERT(operand->HasFixedPolicy());
778  return operand;
779 }
780 
781 
782 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
783  LUnallocated* operand = ToUnallocated(reg);
784  ASSERT(operand->HasFixedPolicy());
785  return operand;
786 }
787 
788 
789 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
790  return new(zone()) LLabel(instr->block());
791 }
792 
793 
794 LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
795  return AssignEnvironment(new(zone()) LDeoptimize);
796 }
797 
798 
799 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
800  return AssignEnvironment(new(zone()) LDeoptimize);
801 }
802 
803 
804 LInstruction* LChunkBuilder::DoShift(Token::Value op,
805  HBitwiseBinaryOperation* instr) {
806  if (instr->representation().IsTagged()) {
807  ASSERT(instr->left()->representation().IsTagged());
808  ASSERT(instr->right()->representation().IsTagged());
809 
810  LOperand* left = UseFixed(instr->left(), rdx);
811  LOperand* right = UseFixed(instr->right(), rax);
812  LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
813  return MarkAsCall(DefineFixed(result, rax), instr);
814  }
815 
816  ASSERT(instr->representation().IsInteger32());
817  ASSERT(instr->left()->representation().IsInteger32());
818  ASSERT(instr->right()->representation().IsInteger32());
819  LOperand* left = UseRegisterAtStart(instr->left());
820 
821  HValue* right_value = instr->right();
822  LOperand* right = NULL;
823  int constant_value = 0;
824  if (right_value->IsConstant()) {
825  HConstant* constant = HConstant::cast(right_value);
826  right = chunk_->DefineConstantOperand(constant);
827  constant_value = constant->Integer32Value() & 0x1f;
828  } else {
829  right = UseFixed(right_value, rcx);
830  }
831 
832  // Shift operations can only deoptimize if we do a logical shift by 0 and
833  // the result cannot be truncated to int32.
834  bool may_deopt = (op == Token::SHR && constant_value == 0);
835  bool does_deopt = false;
836  if (may_deopt) {
837  for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
838  if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
839  does_deopt = true;
840  break;
841  }
842  }
843  }
844 
845  LInstruction* result =
846  DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
847  return does_deopt ? AssignEnvironment(result) : result;
848 }
849 
850 
851 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
852  HArithmeticBinaryOperation* instr) {
853  ASSERT(instr->representation().IsDouble());
854  ASSERT(instr->left()->representation().IsDouble());
855  ASSERT(instr->right()->representation().IsDouble());
856  ASSERT(op != Token::MOD);
857  LOperand* left = UseRegisterAtStart(instr->left());
858  LOperand* right = UseRegisterAtStart(instr->right());
859  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
860  return DefineSameAsFirst(result);
861 }
862 
863 
864 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
865  HArithmeticBinaryOperation* instr) {
866  ASSERT(op == Token::ADD ||
867  op == Token::DIV ||
868  op == Token::MOD ||
869  op == Token::MUL ||
870  op == Token::SUB);
871  HValue* left = instr->left();
872  HValue* right = instr->right();
873  ASSERT(left->representation().IsTagged());
874  ASSERT(right->representation().IsTagged());
875  LOperand* left_operand = UseFixed(left, rdx);
876  LOperand* right_operand = UseFixed(right, rax);
877  LArithmeticT* result =
878  new(zone()) LArithmeticT(op, left_operand, right_operand);
879  return MarkAsCall(DefineFixed(result, rax), instr);
880 }
881 
882 
883 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
884  ASSERT(is_building());
885  current_block_ = block;
886  next_block_ = next_block;
887  if (block->IsStartBlock()) {
888  block->UpdateEnvironment(graph_->start_environment());
889  argument_count_ = 0;
890  } else if (block->predecessors()->length() == 1) {
891  // We have a single predecessor => copy environment and outgoing
892  // argument count from the predecessor.
893  ASSERT(block->phis()->length() == 0);
894  HBasicBlock* pred = block->predecessors()->at(0);
895  HEnvironment* last_environment = pred->last_environment();
896  ASSERT(last_environment != NULL);
897  // Only copy the environment, if it is later used again.
898  if (pred->end()->SecondSuccessor() == NULL) {
899  ASSERT(pred->end()->FirstSuccessor() == block);
900  } else {
901  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
902  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
903  last_environment = last_environment->Copy();
904  }
905  }
906  block->UpdateEnvironment(last_environment);
907  ASSERT(pred->argument_count() >= 0);
908  argument_count_ = pred->argument_count();
909  } else {
910  // We are at a state join => process phis.
911  HBasicBlock* pred = block->predecessors()->at(0);
912  // No need to copy the environment, it cannot be used later.
913  HEnvironment* last_environment = pred->last_environment();
914  for (int i = 0; i < block->phis()->length(); ++i) {
915  HPhi* phi = block->phis()->at(i);
916  last_environment->SetValueAt(phi->merged_index(), phi);
917  }
918  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
919  last_environment->SetValueAt(block->deleted_phis()->at(i),
920  graph_->GetConstantUndefined());
921  }
922  block->UpdateEnvironment(last_environment);
923  // Pick up the outgoing argument count of one of the predecessors.
924  argument_count_ = pred->argument_count();
925  }
926  HInstruction* current = block->first();
927  int start = chunk_->instructions()->length();
928  while (current != NULL && !is_aborted()) {
929  // Code for constants in registers is generated lazily.
930  if (!current->EmitAtUses()) {
931  VisitInstruction(current);
932  }
933  current = current->next();
934  }
935  int end = chunk_->instructions()->length() - 1;
936  if (end >= start) {
937  block->set_first_instruction_index(start);
938  block->set_last_instruction_index(end);
939  }
940  block->set_argument_count(argument_count_);
941  next_block_ = NULL;
942  current_block_ = NULL;
943 }
944 
945 
946 void LChunkBuilder::VisitInstruction(HInstruction* current) {
947  HInstruction* old_current = current_instruction_;
948  current_instruction_ = current;
949  if (current->has_position()) position_ = current->position();
950  LInstruction* instr = current->CompileToLithium(this);
951 
952  if (instr != NULL) {
953  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
954  instr = AssignPointerMap(instr);
955  }
956  if (FLAG_stress_environments && !instr->HasEnvironment()) {
957  instr = AssignEnvironment(instr);
958  }
959  instr->set_hydrogen_value(current);
960  chunk_->AddInstruction(instr, current_block_);
961  }
962  current_instruction_ = old_current;
963 }
964 
965 
966 LEnvironment* LChunkBuilder::CreateEnvironment(
967  HEnvironment* hydrogen_env,
968  int* argument_index_accumulator) {
969  if (hydrogen_env == NULL) return NULL;
970 
971  LEnvironment* outer =
972  CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
973  int ast_id = hydrogen_env->ast_id();
974  ASSERT(ast_id != AstNode::kNoNumber ||
975  hydrogen_env->frame_type() != JS_FUNCTION);
976  int value_count = hydrogen_env->length();
977  LEnvironment* result = new(zone()) LEnvironment(
978  hydrogen_env->closure(),
979  hydrogen_env->frame_type(),
980  ast_id,
981  hydrogen_env->parameter_count(),
982  argument_count_,
983  value_count,
984  outer,
985  zone());
986  int argument_index = *argument_index_accumulator;
987  for (int i = 0; i < value_count; ++i) {
988  if (hydrogen_env->is_special_index(i)) continue;
989 
990  HValue* value = hydrogen_env->values()->at(i);
991  LOperand* op = NULL;
992  if (value->IsArgumentsObject()) {
993  op = NULL;
994  } else if (value->IsPushArgument()) {
995  op = new(zone()) LArgument(argument_index++);
996  } else {
997  op = UseAny(value);
998  }
999  result->AddValue(op, value->representation());
1000  }
1001 
1002  if (hydrogen_env->frame_type() == JS_FUNCTION) {
1003  *argument_index_accumulator = argument_index;
1004  }
1005 
1006  return result;
1007 }
1008 
1009 
1010 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1011  return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
1012 }
1013 
1014 
1015 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1016  HValue* value = instr->value();
1017  if (value->EmitAtUses()) {
1018  ASSERT(value->IsConstant());
1019  ASSERT(!value->representation().IsDouble());
1020  HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
1021  ? instr->FirstSuccessor()
1022  : instr->SecondSuccessor();
1023  return new(zone()) LGoto(successor->block_id());
1024  }
1025 
1026  LBranch* result = new(zone()) LBranch(UseRegister(value));
1027  // Tagged values that are not known smis or booleans require a
1028  // deoptimization environment.
1029  Representation rep = value->representation();
1030  HType type = value->type();
1031  if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
1032  return AssignEnvironment(result);
1033  }
1034  return result;
1035 }
1036 
1037 
1038 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
1039  ASSERT(instr->value()->representation().IsTagged());
1040  LOperand* value = UseRegisterAtStart(instr->value());
1041  return new(zone()) LCmpMapAndBranch(value);
1042 }
1043 
1044 
1045 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1046  return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
1047 }
1048 
1049 
1050 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1051  return DefineAsRegister(new(zone()) LArgumentsElements);
1052 }
1053 
1054 
1055 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1056  LOperand* left = UseFixed(instr->left(), rax);
1057  LOperand* right = UseFixed(instr->right(), rdx);
1058  LInstanceOf* result = new(zone()) LInstanceOf(left, right);
1059  return MarkAsCall(DefineFixed(result, rax), instr);
1060 }
1061 
1062 
1063 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1064  HInstanceOfKnownGlobal* instr) {
1065  LInstanceOfKnownGlobal* result =
1066  new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), rax),
1067  FixedTemp(rdi));
1068  return MarkAsCall(DefineFixed(result, rax), instr);
1069 }
1070 
1071 
1072 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1073  LOperand* receiver = UseRegister(instr->receiver());
1074  LOperand* function = UseRegisterAtStart(instr->function());
1075  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1076  return AssignEnvironment(DefineSameAsFirst(result));
1077 }
1078 
1079 
1080 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1081  LOperand* function = UseFixed(instr->function(), rdi);
1082  LOperand* receiver = UseFixed(instr->receiver(), rax);
1083  LOperand* length = UseFixed(instr->length(), rbx);
1084  LOperand* elements = UseFixed(instr->elements(), rcx);
1085  LApplyArguments* result = new(zone()) LApplyArguments(function,
1086  receiver,
1087  length,
1088  elements);
1089  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1090 }
1091 
1092 
1093 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1094  ++argument_count_;
1095  LOperand* argument = UseOrConstant(instr->argument());
1096  return new(zone()) LPushArgument(argument);
1097 }
1098 
1099 
1100 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1101  return instr->HasNoUses()
1102  ? NULL
1103  : DefineAsRegister(new(zone()) LThisFunction);
1104 }
1105 
1106 
1107 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1108  return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
1109 }
1110 
1111 
1112 LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
1113  LOperand* context = UseRegisterAtStart(instr->value());
1114  return DefineAsRegister(new(zone()) LOuterContext(context));
1115 }
1116 
1117 
1118 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1119  return MarkAsCall(new(zone()) LDeclareGlobals, instr);
1120 }
1121 
1122 
1123 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1124  return DefineAsRegister(new(zone()) LGlobalObject);
1125 }
1126 
1127 
1128 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1129  LOperand* global_object = UseRegisterAtStart(instr->value());
1130  return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
1131 }
1132 
1133 
1134 LInstruction* LChunkBuilder::DoCallConstantFunction(
1135  HCallConstantFunction* instr) {
1136  argument_count_ -= instr->argument_count();
1137  return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, rax), instr);
1138 }
1139 
1140 
1141 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1142  LOperand* function = UseFixed(instr->function(), rdi);
1143  argument_count_ -= instr->argument_count();
1144  LInvokeFunction* result = new(zone()) LInvokeFunction(function);
1145  return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1146 }
1147 
1148 
1149 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1150  BuiltinFunctionId op = instr->op();
1151  if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
1152  LOperand* input = UseFixedDouble(instr->value(), xmm1);
1153  LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
1154  return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
1155  } else {
1156  LOperand* input = UseRegisterAtStart(instr->value());
1157  LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
1158  switch (op) {
1159  case kMathAbs:
1160  return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1161  case kMathFloor:
1162  return AssignEnvironment(DefineAsRegister(result));
1163  case kMathRound:
1164  return AssignEnvironment(DefineAsRegister(result));
1165  case kMathSqrt:
1166  return DefineSameAsFirst(result);
1167  case kMathPowHalf:
1168  return DefineSameAsFirst(result);
1169  default:
1170  UNREACHABLE();
1171  return NULL;
1172  }
1173  }
1174 }
1175 
1176 
1177 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1178  ASSERT(instr->key()->representation().IsTagged());
1179  LOperand* key = UseFixed(instr->key(), rcx);
1180  argument_count_ -= instr->argument_count();
1181  LCallKeyed* result = new(zone()) LCallKeyed(key);
1182  return MarkAsCall(DefineFixed(result, rax), instr);
1183 }
1184 
1185 
1186 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1187  argument_count_ -= instr->argument_count();
1188  return MarkAsCall(DefineFixed(new(zone()) LCallNamed, rax), instr);
1189 }
1190 
1191 
1192 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1193  argument_count_ -= instr->argument_count();
1194  return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, rax), instr);
1195 }
1196 
1197 
1198 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1199  argument_count_ -= instr->argument_count();
1200  return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, rax), instr);
1201 }
1202 
1203 
1204 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1205  LOperand* constructor = UseFixed(instr->constructor(), rdi);
1206  argument_count_ -= instr->argument_count();
1207  LCallNew* result = new(zone()) LCallNew(constructor);
1208  return MarkAsCall(DefineFixed(result, rax), instr);
1209 }
1210 
1211 
1212 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1213  LOperand* function = UseFixed(instr->function(), rdi);
1214  argument_count_ -= instr->argument_count();
1215  LCallFunction* result = new(zone()) LCallFunction(function);
1216  return MarkAsCall(DefineFixed(result, rax), instr);
1217 }
1218 
1219 
1220 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1221  argument_count_ -= instr->argument_count();
1222  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, rax), instr);
1223 }
1224 
1225 
1226 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1227  return DoShift(Token::SHR, instr);
1228 }
1229 
1230 
1231 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1232  return DoShift(Token::SAR, instr);
1233 }
1234 
1235 
1236 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1237  return DoShift(Token::SHL, instr);
1238 }
1239 
1240 
1241 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1242  if (instr->representation().IsInteger32()) {
1243  ASSERT(instr->left()->representation().IsInteger32());
1244  ASSERT(instr->right()->representation().IsInteger32());
1245 
1246  LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1247  LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1248  return DefineSameAsFirst(new(zone()) LBitI(left, right));
1249  } else {
1250  ASSERT(instr->representation().IsTagged());
1251  ASSERT(instr->left()->representation().IsTagged());
1252  ASSERT(instr->right()->representation().IsTagged());
1253 
1254  LOperand* left = UseFixed(instr->left(), rdx);
1255  LOperand* right = UseFixed(instr->right(), rax);
1256  LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
1257  return MarkAsCall(DefineFixed(result, rax), instr);
1258  }
1259 }
1260 
1261 
1262 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1263  ASSERT(instr->value()->representation().IsInteger32());
1264  ASSERT(instr->representation().IsInteger32());
1265  if (instr->HasNoUses()) return NULL;
1266  LOperand* input = UseRegisterAtStart(instr->value());
1267  LBitNotI* result = new(zone()) LBitNotI(input);
1268  return DefineSameAsFirst(result);
1269 }
1270 
1271 
1272 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1273  if (instr->representation().IsDouble()) {
1274  return DoArithmeticD(Token::DIV, instr);
1275  } else if (instr->representation().IsInteger32()) {
1276  // The temporary operand is necessary to ensure that right is not allocated
1277  // into rdx.
1278  LOperand* temp = FixedTemp(rdx);
1279  LOperand* dividend = UseFixed(instr->left(), rax);
1280  LOperand* divisor = UseRegister(instr->right());
1281  LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
1282  return AssignEnvironment(DefineFixed(result, rax));
1283  } else {
1284  ASSERT(instr->representation().IsTagged());
1285  return DoArithmeticT(Token::DIV, instr);
1286  }
1287 }
1288 
1289 
// Flooring integer division is not implemented for x64 in this
// version; hitting this is a fatal error.
LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  UNIMPLEMENTED();
  return NULL;
}
1294 
1295 
// Lowers modulus for all three representations:
//  - int32 with a power-of-2 divisor: inline masking path, no fixed
//    registers needed;
//  - general int32: rax/rdx fixed-register division, remainder in rdx;
//  - tagged: generic binary-op call;
//  - double: C call through fixed xmm registers.
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());

    LInstruction* result;
    if (instr->HasPowerOf2Divisor()) {
      // A power-of-2 divisor cannot be zero here.
      ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
      LOperand* value = UseRegisterAtStart(instr->left());
      LModI* mod =
          new(zone()) LModI(value, UseOrConstant(instr->right()), NULL);
      result = DefineSameAsFirst(mod);
    } else {
      // The temporary operand is necessary to ensure that right is not
      // allocated into rdx.
      LOperand* temp = FixedTemp(rdx);
      LOperand* value = UseFixed(instr->left(), rax);
      LOperand* divisor = UseRegister(instr->right());
      LModI* mod = new(zone()) LModI(value, divisor, temp);
      // The remainder is produced in rdx.
      result = DefineFixed(mod, rdx);
    }

    // -0 results and division by zero both require a deopt environment.
    return (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
            instr->CheckFlag(HValue::kCanBeDivByZero))
        ? AssignEnvironment(result)
        : result;
  } else if (instr->representation().IsTagged()) {
    return DoArithmeticT(Token::MOD, instr);
  } else {
    ASSERT(instr->representation().IsDouble());
    // We call a C function for double modulo. It can't trigger a GC.
    // We need to use fixed result register for the call.
    // TODO(fschneider): Allow any register as input registers.
    LOperand* left = UseFixedDouble(instr->left(), xmm2);
    LOperand* right = UseFixedDouble(instr->right(), xmm1);
    LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
    return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
  }
}
1335 
1336 
// Lowers multiplication: inline LMulI for int32, helper paths for
// double and tagged representations.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsInteger32()) {
    ASSERT(instr->left()->representation().IsInteger32());
    ASSERT(instr->right()->representation().IsInteger32());
    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
    LOperand* right = UseOrConstant(instr->MostConstantOperand());
    LMulI* mul = new(zone()) LMulI(left, right);
    if (instr->CheckFlag(HValue::kCanOverflow) ||
        instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // NOTE(review): the return value of AssignEnvironment is
      // intentionally discarded -- this relies on AssignEnvironment
      // mutating |mul| in place; confirm against its definition.
      AssignEnvironment(mul);
    }
    return DefineSameAsFirst(mul);
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);
  } else {
    ASSERT(instr->representation().IsTagged());
    return DoArithmeticT(Token::MUL, instr);
  }
}
1356 
1357 
1358 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1359  if (instr->representation().IsInteger32()) {
1360  ASSERT(instr->left()->representation().IsInteger32());
1361  ASSERT(instr->right()->representation().IsInteger32());
1362  LOperand* left = UseRegisterAtStart(instr->left());
1363  LOperand* right = UseOrConstantAtStart(instr->right());
1364  LSubI* sub = new(zone()) LSubI(left, right);
1365  LInstruction* result = DefineSameAsFirst(sub);
1366  if (instr->CheckFlag(HValue::kCanOverflow)) {
1367  result = AssignEnvironment(result);
1368  }
1369  return result;
1370  } else if (instr->representation().IsDouble()) {
1371  return DoArithmeticD(Token::SUB, instr);
1372  } else {
1373  ASSERT(instr->representation().IsTagged());
1374  return DoArithmeticT(Token::SUB, instr);
1375  }
1376 }
1377 
1378 
1379 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1380  if (instr->representation().IsInteger32()) {
1381  ASSERT(instr->left()->representation().IsInteger32());
1382  ASSERT(instr->right()->representation().IsInteger32());
1383  LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1384  LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1385  LAddI* add = new(zone()) LAddI(left, right);
1386  LInstruction* result = DefineSameAsFirst(add);
1387  if (instr->CheckFlag(HValue::kCanOverflow)) {
1388  result = AssignEnvironment(result);
1389  }
1390  return result;
1391  } else if (instr->representation().IsDouble()) {
1392  return DoArithmeticD(Token::ADD, instr);
1393  } else {
1394  ASSERT(instr->representation().IsTagged());
1395  return DoArithmeticT(Token::ADD, instr);
1396  }
1397  return NULL;
1398 }
1399 
1400 
// Lowers Math.pow via a C call, so both inputs and the result must sit
// in fixed registers.  The exponent may be a double (xmm1) or an
// integer/tagged value, in which case it goes in the platform's
// integer argument register (rdx on Win64, rdi on System V targets --
// note the #ifdef is spliced into the middle of the ?: expression).
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), xmm2);
  LOperand* right = exponent_type.IsDouble() ?
      UseFixedDouble(instr->right(), xmm1) :
#ifdef _WIN64
      UseFixed(instr->right(), rdx);
#else
      UseFixed(instr->right(), rdi);
#endif
  LPower* result = new(zone()) LPower(left, right);
  // The call can deoptimize eagerly; the double result lands in xmm3.
  return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
1419 
1420 
// Lowers Math.random as a call; the global object goes in the
// platform's first integer argument register (rcx on Win64, rdi on
// System V targets), and the double result is fixed in xmm1.
LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
  ASSERT(instr->representation().IsDouble());
  ASSERT(instr->global_object()->representation().IsTagged());
#ifdef _WIN64
  LOperand* global_object = UseFixed(instr->global_object(), rcx);
#else
  LOperand* global_object = UseFixed(instr->global_object(), rdi);
#endif
  LRandom* result = new(zone()) LRandom(global_object);
  return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
1432 
1433 
1434 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1435  ASSERT(instr->left()->representation().IsTagged());
1436  ASSERT(instr->right()->representation().IsTagged());
1437  LOperand* left = UseFixed(instr->left(), rdx);
1438  LOperand* right = UseFixed(instr->right(), rax);
1439  LCmpT* result = new(zone()) LCmpT(left, right);
1440  return MarkAsCall(DefineFixed(result, rax), instr);
1441 }
1442 
1443 
1444 LInstruction* LChunkBuilder::DoCompareIDAndBranch(
1445  HCompareIDAndBranch* instr) {
1446  Representation r = instr->GetInputRepresentation();
1447  if (r.IsInteger32()) {
1448  ASSERT(instr->left()->representation().IsInteger32());
1449  ASSERT(instr->right()->representation().IsInteger32());
1450  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1451  LOperand* right = UseOrConstantAtStart(instr->right());
1452  return new(zone()) LCmpIDAndBranch(left, right);
1453  } else {
1454  ASSERT(r.IsDouble());
1455  ASSERT(instr->left()->representation().IsDouble());
1456  ASSERT(instr->right()->representation().IsDouble());
1457  LOperand* left;
1458  LOperand* right;
1459  if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1460  left = UseRegisterOrConstantAtStart(instr->left());
1461  right = UseRegisterOrConstantAtStart(instr->right());
1462  } else {
1463  left = UseRegisterAtStart(instr->left());
1464  right = UseRegisterAtStart(instr->right());
1465  }
1466  return new(zone()) LCmpIDAndBranch(left, right);
1467  }
1468 }
1469 
1470 
1471 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1472  HCompareObjectEqAndBranch* instr) {
1473  LOperand* left = UseRegisterAtStart(instr->left());
1474  LOperand* right = UseRegisterAtStart(instr->right());
1475  return new(zone()) LCmpObjectEqAndBranch(left, right);
1476 }
1477 
1478 
1479 LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
1480  HCompareConstantEqAndBranch* instr) {
1481  LOperand* value = UseRegisterAtStart(instr->value());
1482  return new(zone()) LCmpConstantEqAndBranch(value);
1483 }
1484 
1485 
1486 LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
1487  ASSERT(instr->value()->representation().IsTagged());
1488  LOperand* temp = instr->kind() == kStrictEquality ? NULL : TempRegister();
1489  return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
1490 }
1491 
1492 
1493 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1494  ASSERT(instr->value()->representation().IsTagged());
1495  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
1496 }
1497 
1498 
1499 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1500  ASSERT(instr->value()->representation().IsTagged());
1501  LOperand* value = UseRegisterAtStart(instr->value());
1502  LOperand* temp = TempRegister();
1503  return new(zone()) LIsStringAndBranch(value, temp);
1504 }
1505 
1506 
1507 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1508  ASSERT(instr->value()->representation().IsTagged());
1509  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1510 }
1511 
1512 
1513 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1514  HIsUndetectableAndBranch* instr) {
1515  ASSERT(instr->value()->representation().IsTagged());
1516  LOperand* value = UseRegisterAtStart(instr->value());
1517  LOperand* temp = TempRegister();
1518  return new(zone()) LIsUndetectableAndBranch(value, temp);
1519 }
1520 
1521 
1522 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1523  HStringCompareAndBranch* instr) {
1524 
1525  ASSERT(instr->left()->representation().IsTagged());
1526  ASSERT(instr->right()->representation().IsTagged());
1527  LOperand* left = UseFixed(instr->left(), rdx);
1528  LOperand* right = UseFixed(instr->right(), rax);
1529  LStringCompareAndBranch* result =
1530  new(zone()) LStringCompareAndBranch(left, right);
1531 
1532  return MarkAsCall(result, instr);
1533 }
1534 
1535 
1536 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1537  HHasInstanceTypeAndBranch* instr) {
1538  ASSERT(instr->value()->representation().IsTagged());
1539  LOperand* value = UseRegisterAtStart(instr->value());
1540  return new(zone()) LHasInstanceTypeAndBranch(value);
1541 }
1542 
1543 
1544 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1545  HGetCachedArrayIndex* instr) {
1546  ASSERT(instr->value()->representation().IsTagged());
1547  LOperand* value = UseRegisterAtStart(instr->value());
1548 
1549  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1550 }
1551 
1552 
1553 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1554  HHasCachedArrayIndexAndBranch* instr) {
1555  ASSERT(instr->value()->representation().IsTagged());
1556  LOperand* value = UseRegisterAtStart(instr->value());
1557  return new(zone()) LHasCachedArrayIndexAndBranch(value);
1558 }
1559 
1560 
1561 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1562  HClassOfTestAndBranch* instr) {
1563  LOperand* value = UseRegister(instr->value());
1564  return new(zone()) LClassOfTestAndBranch(value,
1565  TempRegister(),
1566  TempRegister());
1567 }
1568 
1569 
1570 LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
1571  LOperand* array = UseRegisterAtStart(instr->value());
1572  return DefineAsRegister(new(zone()) LJSArrayLength(array));
1573 }
1574 
1575 
1576 LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
1577  HFixedArrayBaseLength* instr) {
1578  LOperand* array = UseRegisterAtStart(instr->value());
1579  return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
1580 }
1581 
1582 
1583 LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
1584  LOperand* object = UseRegisterAtStart(instr->value());
1585  return DefineAsRegister(new(zone()) LElementsKind(object));
1586 }
1587 
1588 
1589 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1590  LOperand* object = UseRegister(instr->value());
1591  LValueOf* result = new(zone()) LValueOf(object);
1592  return DefineSameAsFirst(result);
1593 }
1594 
1595 
1596 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1597  LOperand* object = UseFixed(instr->value(), rax);
1598  LDateField* result = new(zone()) LDateField(object, instr->index());
1599  return MarkAsCall(DefineFixed(result, rax), instr);
1600 }
1601 
1602 
1603 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1604  LOperand* value = UseRegisterOrConstantAtStart(instr->index());
1605  LOperand* length = Use(instr->length());
1606  return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
1607 }
1608 
1609 
LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
  // The control instruction marking the end of a block that completed
  // abruptly (e.g., threw an exception). There is nothing specific to do,
  // so no Lithium instruction is emitted.
  return NULL;
}
1615 
1616 
1617 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1618  LOperand* value = UseFixed(instr->value(), rax);
1619  return MarkAsCall(new(zone()) LThrow(value), instr);
1620 }
1621 
1622 
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  // No Lithium instruction is emitted.  NOTE(review): HUseConst
  // presumably exists only to keep its input alive through this point;
  // confirm against its hydrogen definition.
  return NULL;
}
1626 
1627 
LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen, so reaching this visitor
  // indicates a compiler bug.
  UNREACHABLE();
  return NULL;
}
1634 
1635 
// Lowers representation changes between tagged, double and int32.
// Each (from, to) pair gets its own lowering; several of them can
// deoptimize or allocate, hence the environment/pointer-map wrappers.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();
  if (from.IsTagged()) {
    if (to.IsDouble()) {
      // Tagged -> double: deopts if the input is not a number.
      LOperand* value = UseRegister(instr->value());
      LNumberUntagD* res = new(zone()) LNumberUntagD(value);
      return AssignEnvironment(DefineAsRegister(res));
    } else {
      ASSERT(to.IsInteger32());
      LOperand* value = UseRegister(instr->value());
      if (instr->value()->type().IsSmi()) {
        // Known smi: plain untag, no deopt needed.
        return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
      } else {
        // Non-truncating conversions need an xmm scratch register;
        // the conversion can deoptimize.
        bool truncating = instr->CanTruncateToInt32();
        LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
        LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
        return AssignEnvironment(DefineSameAsFirst(res));
      }
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      LOperand* value = UseRegister(instr->value());
      LOperand* temp = TempRegister();

      // Make sure that temp and result_temp are different registers.
      LUnallocated* result_temp = TempRegister();
      // Boxing a double gets a pointer map (the result is allocated).
      LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
      return AssignPointerMap(Define(result, result_temp));
    } else {
      ASSERT(to.IsInteger32());
      // Double -> int32 can deoptimize.
      LOperand* value = UseRegister(instr->value());
      return AssignEnvironment(DefineAsRegister(new(zone()) LDoubleToI(value)));
    }
  } else if (from.IsInteger32()) {
    if (to.IsTagged()) {
      HValue* val = instr->value();
      LOperand* value = UseRegister(val);
      if (val->HasRange() && val->range()->IsInSmiRange()) {
        // The value provably fits in a smi: tag without a number box.
        return DefineSameAsFirst(new(zone()) LSmiTag(value));
      } else {
        // May need a heap number: pointer map plus deopt environment.
        LNumberTagI* result = new(zone()) LNumberTagI(value);
        return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
      }
    } else {
      ASSERT(to.IsDouble());
      // Int32 -> double never fails.
      LOperand* value = Use(instr->value());
      return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
    }
  }
  UNREACHABLE();
  return NULL;
}
1689 
1690 
1691 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1692  LOperand* value = UseRegisterAtStart(instr->value());
1693  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
1694 }
1695 
1696 
1697 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1698  LOperand* value = UseRegisterAtStart(instr->value());
1699  LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
1700  return AssignEnvironment(result);
1701 }
1702 
1703 
1704 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1705  LOperand* temp = TempRegister();
1706  LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
1707  return AssignEnvironment(result);
1708 }
1709 
1710 
1711 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1712  LOperand* value = UseRegisterAtStart(instr->value());
1713  return AssignEnvironment(new(zone()) LCheckSmi(value));
1714 }
1715 
1716 
1717 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1718  LOperand* value = UseRegisterAtStart(instr->value());
1719  return AssignEnvironment(new(zone()) LCheckFunction(value));
1720 }
1721 
1722 
1723 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1724  LOperand* value = UseRegisterAtStart(instr->value());
1725  LCheckMaps* result = new(zone()) LCheckMaps(value);
1726  return AssignEnvironment(result);
1727 }
1728 
1729 
1730 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1731  HValue* value = instr->value();
1732  Representation input_rep = value->representation();
1733  LOperand* reg = UseRegister(value);
1734  if (input_rep.IsDouble()) {
1735  return DefineAsRegister(new(zone()) LClampDToUint8(reg,
1736  TempRegister()));
1737  } else if (input_rep.IsInteger32()) {
1738  return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
1739  } else {
1740  ASSERT(input_rep.IsTagged());
1741  // Register allocator doesn't (yet) support allocation of double
1742  // temps. Reserve xmm1 explicitly.
1743  LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
1744  TempRegister(),
1745  FixedTemp(xmm1));
1746  return AssignEnvironment(DefineSameAsFirst(result));
1747  }
1748 }
1749 
1750 
1751 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1752  return new(zone()) LReturn(UseFixed(instr->value(), rax));
1753 }
1754 
1755 
1756 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1757  Representation r = instr->representation();
1758  if (r.IsInteger32()) {
1759  return DefineAsRegister(new(zone()) LConstantI);
1760  } else if (r.IsDouble()) {
1761  LOperand* temp = TempRegister();
1762  return DefineAsRegister(new(zone()) LConstantD(temp));
1763  } else if (r.IsTagged()) {
1764  return DefineAsRegister(new(zone()) LConstantT);
1765  } else {
1766  UNREACHABLE();
1767  return NULL;
1768  }
1769 }
1770 
1771 
1772 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1773  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
1774  return instr->RequiresHoleCheck()
1775  ? AssignEnvironment(DefineAsRegister(result))
1776  : DefineAsRegister(result);
1777 }
1778 
1779 
1780 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
1781  LOperand* global_object = UseFixed(instr->global_object(), rax);
1782  LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
1783  return MarkAsCall(DefineFixed(result, rax), instr);
1784 }
1785 
1786 
1787 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
1788  LOperand* value = UseRegister(instr->value());
1789  // Use a temp to avoid reloading the cell value address in the case where
1790  // we perform a hole check.
1791  return instr->RequiresHoleCheck()
1792  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
1793  : new(zone()) LStoreGlobalCell(value, NULL);
1794 }
1795 
1796 
1797 LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
1798  LOperand* global_object = UseFixed(instr->global_object(), rdx);
1799  LOperand* value = UseFixed(instr->value(), rax);
1800  LStoreGlobalGeneric* result = new(zone()) LStoreGlobalGeneric(global_object,
1801  value);
1802  return MarkAsCall(result, instr);
1803 }
1804 
1805 
1806 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
1807  LOperand* context = UseRegisterAtStart(instr->value());
1808  LInstruction* result =
1809  DefineAsRegister(new(zone()) LLoadContextSlot(context));
1810  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1811 }
1812 
1813 
1814 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
1815  LOperand* context;
1816  LOperand* value;
1817  LOperand* temp;
1818  if (instr->NeedsWriteBarrier()) {
1819  context = UseTempRegister(instr->context());
1820  value = UseTempRegister(instr->value());
1821  temp = TempRegister();
1822  } else {
1823  context = UseRegister(instr->context());
1824  value = UseRegister(instr->value());
1825  temp = NULL;
1826  }
1827  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
1828  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
1829 }
1830 
1831 
1832 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1833  ASSERT(instr->representation().IsTagged());
1834  LOperand* obj = UseRegisterAtStart(instr->object());
1835  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
1836 }
1837 
1838 
1839 LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
1840  HLoadNamedFieldPolymorphic* instr) {
1841  ASSERT(instr->representation().IsTagged());
1842  if (instr->need_generic()) {
1843  LOperand* obj = UseFixed(instr->object(), rax);
1844  LLoadNamedFieldPolymorphic* result =
1845  new(zone()) LLoadNamedFieldPolymorphic(obj);
1846  return MarkAsCall(DefineFixed(result, rax), instr);
1847  } else {
1848  LOperand* obj = UseRegisterAtStart(instr->object());
1849  LLoadNamedFieldPolymorphic* result =
1850  new(zone()) LLoadNamedFieldPolymorphic(obj);
1851  return AssignEnvironment(DefineAsRegister(result));
1852  }
1853 }
1854 
1855 
1856 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1857  LOperand* object = UseFixed(instr->object(), rax);
1858  LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(object);
1859  return MarkAsCall(DefineFixed(result, rax), instr);
1860 }
1861 
1862 
1863 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
1864  HLoadFunctionPrototype* instr) {
1865  return AssignEnvironment(DefineAsRegister(
1866  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
1867 }
1868 
1869 
1870 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1871  LOperand* input = UseRegisterAtStart(instr->value());
1872  return DefineAsRegister(new(zone()) LLoadElements(input));
1873 }
1874 
1875 
1876 LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
1877  HLoadExternalArrayPointer* instr) {
1878  LOperand* input = UseRegisterAtStart(instr->value());
1879  return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
1880 }
1881 
1882 
1883 LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
1884  HLoadKeyedFastElement* instr) {
1885  ASSERT(instr->representation().IsTagged());
1886  ASSERT(instr->key()->representation().IsInteger32());
1887  LOperand* obj = UseRegisterAtStart(instr->object());
1888  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1889  LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
1890  if (instr->RequiresHoleCheck()) AssignEnvironment(result);
1891  return DefineAsRegister(result);
1892 }
1893 
1894 
1895 LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
1896  HLoadKeyedFastDoubleElement* instr) {
1897  ASSERT(instr->representation().IsDouble());
1898  ASSERT(instr->key()->representation().IsInteger32());
1899  LOperand* elements = UseRegisterAtStart(instr->elements());
1900  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1901  LLoadKeyedFastDoubleElement* result =
1902  new(zone()) LLoadKeyedFastDoubleElement(elements, key);
1903  return AssignEnvironment(DefineAsRegister(result));
1904 }
1905 
1906 
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
    HLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  // Float and double external arrays produce double values; every other
  // external-array kind produces an int32.
  ASSERT(
      (instr->representation().IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (instr->representation().IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->key()->representation().IsInteger32());
  LOperand* external_pointer = UseRegister(instr->external_pointer());
  LOperand* key = UseRegisterOrConstant(instr->key());
  LLoadKeyedSpecializedArrayElement* result =
      new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
  LInstruction* load_instr = DefineAsRegister(result);
  // An unsigned int array load might overflow and cause a deopt, make sure it
  // has an environment.
  return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
      AssignEnvironment(load_instr) : load_instr;
}
1928 
1929 
1930 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1931  LOperand* object = UseFixed(instr->object(), rdx);
1932  LOperand* key = UseFixed(instr->key(), rax);
1933 
1934  LLoadKeyedGeneric* result = new(zone()) LLoadKeyedGeneric(object, key);
1935  return MarkAsCall(DefineFixed(result, rax), instr);
1936 }
1937 
1938 
1939 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
1940  HStoreKeyedFastElement* instr) {
1941  bool needs_write_barrier = instr->NeedsWriteBarrier();
1942  ASSERT(instr->value()->representation().IsTagged());
1943  ASSERT(instr->object()->representation().IsTagged());
1944  ASSERT(instr->key()->representation().IsInteger32());
1945 
1946  LOperand* obj = UseTempRegister(instr->object());
1947  LOperand* val = needs_write_barrier
1948  ? UseTempRegister(instr->value())
1949  : UseRegisterAtStart(instr->value());
1950  LOperand* key = needs_write_barrier
1951  ? UseTempRegister(instr->key())
1952  : UseRegisterOrConstantAtStart(instr->key());
1953  return new(zone()) LStoreKeyedFastElement(obj, key, val);
1954 }
1955 
1956 
1957 LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
1958  HStoreKeyedFastDoubleElement* instr) {
1959  ASSERT(instr->value()->representation().IsDouble());
1960  ASSERT(instr->elements()->representation().IsTagged());
1961  ASSERT(instr->key()->representation().IsInteger32());
1962 
1963  LOperand* elements = UseRegisterAtStart(instr->elements());
1964  LOperand* val = UseTempRegister(instr->value());
1965  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
1966 
1967  return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
1968 }
1969 
1970 
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
    HStoreKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  // Float and double external arrays take double values; every other
  // external-array kind takes an int32.
  ASSERT(
      (instr->value()->representation().IsInteger32() &&
       (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
       (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
      (instr->value()->representation().IsDouble() &&
       ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
        (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
  ASSERT(instr->external_pointer()->representation().IsExternal());
  ASSERT(instr->key()->representation().IsInteger32());

  LOperand* external_pointer = UseRegister(instr->external_pointer());
  // Pixel and float stores need the value in a temp register —
  // presumably because codegen rewrites the value register (clamping /
  // conversion); confirm against lithium-codegen-x64.
  bool val_is_temp_register =
      elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
      elements_kind == EXTERNAL_FLOAT_ELEMENTS;
  LOperand* val = val_is_temp_register
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());
  LOperand* key = UseRegisterOrConstant(instr->key());

  return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
                                                        key,
                                                        val);
}
1997 
1998 
1999 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2000  LOperand* object = UseFixed(instr->object(), rdx);
2001  LOperand* key = UseFixed(instr->key(), rcx);
2002  LOperand* value = UseFixed(instr->value(), rax);
2003 
2004  ASSERT(instr->object()->representation().IsTagged());
2005  ASSERT(instr->key()->representation().IsTagged());
2006  ASSERT(instr->value()->representation().IsTagged());
2007 
2008  LStoreKeyedGeneric* result =
2009  new(zone()) LStoreKeyedGeneric(object, key, value);
2010  return MarkAsCall(result, instr);
2011 }
2012 
2013 
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  ElementsKind from_kind = instr->original_map()->elements_kind();
  ElementsKind to_kind = instr->transitioned_map()->elements_kind();
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // Simple map-change transitions are done inline with two scratch
    // registers; the result aliases the object register.
    LOperand* object = UseRegister(instr->object());
    LOperand* new_map_reg = TempRegister();
    LOperand* temp_reg = TempRegister();
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, new_map_reg, temp_reg);
    return DefineSameAsFirst(result);
  } else {
    // Other transitions are calls with a fixed register assignment:
    // object in rax, fixed temps in rdx and rbx, result in rax.
    LOperand* object = UseFixed(instr->object(), rax);
    LOperand* fixed_object_reg = FixedTemp(rdx);
    LOperand* new_map_reg = FixedTemp(rbx);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object,
                                            new_map_reg,
                                            fixed_object_reg);
    return MarkAsCall(DefineFixed(result, rax), instr);
  }
}
2036 
2037 
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  bool needs_write_barrier = instr->NeedsWriteBarrier();
  // A store that also transitions the map may need a write barrier for
  // the new map word.
  bool needs_write_barrier_for_map = !instr->transition().is_null() &&
      instr->NeedsWriteBarrierForMap();

  // Operand policy for the object depends on which write barriers follow
  // the store (the object must stay usable for barrier code).
  LOperand* obj;
  if (needs_write_barrier) {
    obj = instr->is_in_object()
        ? UseRegister(instr->object())
        : UseTempRegister(instr->object());
  } else {
    obj = needs_write_barrier_for_map
        ? UseRegister(instr->object())
        : UseRegisterAtStart(instr->object());
  }

  // The value must survive past the store when a write barrier follows.
  LOperand* val = needs_write_barrier
      ? UseTempRegister(instr->value())
      : UseRegister(instr->value());

  // We only need a scratch register if we have a write barrier or we
  // have a store into the properties array (not in-object-property).
  LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
      needs_write_barrier_for_map) ? TempRegister() : NULL;

  return new(zone()) LStoreNamedField(obj, val, temp);
}
2065 
2066 
2067 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2068  LOperand* object = UseFixed(instr->object(), rdx);
2069  LOperand* value = UseFixed(instr->value(), rax);
2070 
2071  LStoreNamedGeneric* result = new(zone()) LStoreNamedGeneric(object, value);
2072  return MarkAsCall(result, instr);
2073 }
2074 
2075 
2076 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2077  LOperand* left = UseOrConstantAtStart(instr->left());
2078  LOperand* right = UseOrConstantAtStart(instr->right());
2079  return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), rax),
2080  instr);
2081 }
2082 
2083 
2084 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2085  LOperand* string = UseTempRegister(instr->string());
2086  LOperand* index = UseTempRegister(instr->index());
2087  LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
2088  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
2089 }
2090 
2091 
2092 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2093  LOperand* char_code = UseRegister(instr->value());
2094  LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
2095  return AssignPointerMap(DefineAsRegister(result));
2096 }
2097 
2098 
2099 LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
2100  LOperand* string = UseRegisterAtStart(instr->value());
2101  return DefineAsRegister(new(zone()) LStringLength(string));
2102 }
2103 
2104 
2105 LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
2106  LAllocateObject* result = new(zone()) LAllocateObject(TempRegister());
2107  return AssignPointerMap(DefineAsRegister(result));
2108 }
2109 
2110 
2111 LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
2112  return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, rax), instr);
2113 }
2114 
2115 
2116 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
2117  return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, rax), instr);
2118 }
2119 
2120 
2121 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
2122  return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, rax), instr);
2123 }
2124 
2125 
2126 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2127  return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, rax), instr);
2128 }
2129 
2130 
2131 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2132  return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, rax), instr);
2133 }
2134 
2135 
2136 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
2137  LOperand* object = UseAtStart(instr->object());
2138  LOperand* key = UseOrConstantAtStart(instr->key());
2139  LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
2140  return MarkAsCall(DefineFixed(result, rax), instr);
2141 }
2142 
2143 
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  // On-stack-replacement entry: tell the register allocator about it and
  // tag the current environment with the OSR ast id before capturing it.
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
2149 
2150 
2151 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2152  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2153  return DefineAsSpilled(new(zone()) LParameter, spill_index);
2154 }
2155 
2156 
2157 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2158  int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
2159  if (spill_index > LUnallocated::kMaxFixedIndex) {
2160  Abort("Too many spill slots needed for OSR");
2161  spill_index = 0;
2162  }
2163  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2164 }
2165 
2166 
2167 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2168  argument_count_ -= instr->argument_count();
2169  return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr);
2170 }
2171 
2172 
LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
  // There are no real uses of the arguments object.
  // arguments.length and element access are supported directly on
  // stack arguments, and any real arguments object use causes a bailout.
  // So this value is never used and no lithium instruction is emitted.
  return NULL;
}
2180 
2181 
2182 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2183  LOperand* arguments = UseRegister(instr->arguments());
2184  LOperand* length = UseTempRegister(instr->length());
2185  LOperand* index = Use(instr->index());
2186  LAccessArgumentsAt* result =
2187  new(zone()) LAccessArgumentsAt(arguments, length, index);
2188  return AssignEnvironment(DefineAsRegister(result));
2189 }
2190 
2191 
2192 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2193  LOperand* object = UseFixed(instr->value(), rax);
2194  LToFastProperties* result = new(zone()) LToFastProperties(object);
2195  return MarkAsCall(DefineFixed(result, rax), instr);
2196 }
2197 
2198 
2199 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2200  LTypeof* result = new(zone()) LTypeof(UseAtStart(instr->value()));
2201  return MarkAsCall(DefineFixed(result, rax), instr);
2202 }
2203 
2204 
2205 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2206  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2207 }
2208 
2209 
2210 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2211  HIsConstructCallAndBranch* instr) {
2212  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2213 }
2214 
2215 
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  // Simulates normally emit no code; they update the block's simulated
  // environment (the deoptimization state).
  HEnvironment* env = current_block_->last_environment();
  ASSERT(env != NULL);

  env->set_ast_id(instr->ast_id());

  // Replay the simulate's stack effects: pop first, then either bind
  // values to assigned slots or push them.
  env->Drop(instr->pop_count());
  for (int i = 0; i < instr->values()->length(); ++i) {
    HValue* value = instr->values()->at(i);
    if (instr->HasAssignedIndexAt(i)) {
      env->Bind(instr->GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }

  // If there is an instruction pending deoptimization environment create a
  // lazy bailout instruction to capture the environment.
  if (pending_deoptimization_ast_id_ == instr->ast_id()) {
    LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
    LInstruction* result = AssignEnvironment(lazy_bailout);
    // Store the lazy deopt environment with the instruction if needed. Right
    // now it is only used for LInstanceOfKnownGlobal.
    instruction_pending_deoptimization_environment_->
        SetDeferredLazyDeoptimizationEnvironment(result->environment());
    // Clear the pending state so the bailout is emitted exactly once.
    instruction_pending_deoptimization_environment_ = NULL;
    pending_deoptimization_ast_id_ = AstNode::kNoNumber;
    return result;
  }

  return NULL;
}
2248 
2249 
2250 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2251  if (instr->is_function_entry()) {
2252  return MarkAsCall(new(zone()) LStackCheck, instr);
2253  } else {
2254  ASSERT(instr->is_backwards_branch());
2255  return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
2256  }
2257 }
2258 
2259 
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  // Entering an inlined function emits no code; it replaces the current
  // block's environment with one describing the inlined frame.
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->call_kind(),
                                               instr->is_construct());
  if (instr->arguments_var() != NULL) {
    // Bind the arguments variable to the shared arguments-object marker.
    inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
  }
  current_block_->UpdateEnvironment(inner);
  // Remember the closure so the chunk can record all inlined functions.
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
2276 
2277 
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  // Restores the caller's environment when leaving an inlined function.
  LInstruction* pop = NULL;

  HEnvironment* env = current_block_->last_environment();

  if (instr->arguments_pushed()) {
    // The inlined function pushed its arguments; emit an LDrop to pop
    // them and keep the simulated argument count in sync.
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    argument_count_ -= argument_count;
  }

  HEnvironment* outer = current_block_->last_environment()->
      DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}
2295 
2296 
2297 LInstruction* LChunkBuilder::DoIn(HIn* instr) {
2298  LOperand* key = UseOrConstantAtStart(instr->key());
2299  LOperand* object = UseOrConstantAtStart(instr->object());
2300  LIn* result = new(zone()) LIn(key, object);
2301  return MarkAsCall(DefineFixed(result, rax), instr);
2302 }
2303 
2304 
2305 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2306  LOperand* object = UseFixed(instr->enumerable(), rax);
2307  LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
2308  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
2309 }
2310 
2311 
2312 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2313  LOperand* map = UseRegister(instr->map());
2314  return AssignEnvironment(DefineAsRegister(
2315  new(zone()) LForInCacheArray(map)));
2316 }
2317 
2318 
2319 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2320  LOperand* value = UseRegisterAtStart(instr->value());
2321  LOperand* map = UseRegisterAtStart(instr->map());
2322  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2323 }
2324 
2325 
2326 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2327  LOperand* object = UseRegister(instr->object());
2328  LOperand* index = UseTempRegister(instr->index());
2329  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
2330 }
2331 
2332 
2333 } } // namespace v8::internal
2334 
2335 #endif // V8_TARGET_ARCH_X64
HValue * LookupValue(int id) const
Definition: hydrogen.h:310
const Register rdx
#define DEFINE_COMPILE(type)
Definition: lithium-arm.cc:37
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:196
static LGap * cast(LInstruction *instr)
Definition: lithium-arm.h:318
static LConstantOperand * Create(int index, Zone *zone)
Definition: lithium.h:263
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:305
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:219
Handle< Object > name() const
Definition: lithium-arm.h:1726
const char * ToCString(const v8::String::Utf8Value &value)
virtual LOperand * InputAt(int i)=0
int GetParameterStackSlot(int index) const
Definition: lithium-arm.cc:497
void PrintF(const char *format,...)
Definition: v8utils.cc:40
static String * cast(Object *obj)
virtual void PrintOutputOperandTo(StringStream *stream)
Definition: lithium-arm.cc:120
Token::Value op() const
Definition: lithium-arm.h:1117
void MarkSpilledDoubleRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:84
LParallelMove * GetOrCreateParallelMove(InnerPosition pos, Zone *zone)
Definition: lithium-arm.h:336
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:279
int ParameterAt(int index)
Definition: lithium-arm.cc:508
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:226
LLabel(HBasicBlock *block)
Definition: lithium-arm.h:400
Handle< String > name() const
Definition: lithium-arm.h:1542
static const int kNoNumber
Definition: ast.h:197
static const int kNumAllocatableRegisters
Handle< Object > name() const
Definition: lithium-arm.h:1705
LEnvironment * environment() const
Definition: lithium-arm.h:240
Token::Value op() const
Definition: lithium-arm.h:610
#define ASSERT(condition)
Definition: checks.h:270
virtual const char * Mnemonic() const =0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:111
void PrintTo(StringStream *stream)
Definition: lithium.cc:203
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V)
Definition: lithium-arm.h:49
Representation representation() const
EqualityKind kind() const
Definition: lithium-arm.h:668
LGap * GetGapAt(int index) const
Definition: lithium-arm.cc:515
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:368
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:293
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:299
virtual bool HasResult() const =0
#define UNREACHABLE()
Definition: checks.h:50
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:324
int GetNextSpillIndex(bool is_double)
Definition: lithium-arm.cc:420
void PrintTo(StringStream *stream)
Definition: lithium.cc:158
Zone * zone() const
Definition: hydrogen.h:250
LLabel * replacement() const
Definition: lithium-arm.h:410
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:156
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:201
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:330
void MarkSpilledRegister(int allocation_index, LOperand *spill_operand)
Definition: lithium-arm.cc:54
const XMMRegister xmm1
LOperand * GetNextSpillSlot(bool is_double)
Definition: lithium-arm.cc:427
void AddMove(LOperand *from, LOperand *to, Zone *zone)
Definition: lithium.h:401
static const char * String(Value tok)
Definition: token.h:275
const int kPointerSize
Definition: globals.h:234
static LDoubleStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:324
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:336
const Register rbx
bool HasEnvironment() const
Definition: lithium-arm.h:241
static void VPrint(const char *format, va_list args)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:341
virtual LOperand * result()=0
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:233
static int ToAllocationIndex(Register reg)
Definition: assembler-arm.h:77
Zone * zone() const
Definition: lithium-arm.h:2275
const Register rax
const Register rdi
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:395
virtual void PrintTo(StringStream *stream)
Definition: lithium-arm.cc:92
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:125
static LStackSlot * Create(int index, Zone *zone)
Definition: lithium.h:299
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:359
static const int kMaxFixedIndex
Definition: lithium.h:157
const XMMRegister xmm3
bool IsGapAt(int index) const
Definition: lithium-arm.cc:520
LOsrEntry()
Definition: lithium-arm.cc:44
LPointerMap * pointer_map() const
Definition: lithium-arm.h:244
const ZoneList< HBasicBlock * > * blocks() const
Definition: hydrogen.h:252
static int ToAllocationIndex(XMMRegister reg)
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:312
LLabel * GetLabel(int block_id) const
Definition: lithium-arm.h:2249
virtual DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,"string-compare-and-branch") Token void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:247
void AddInstruction(LInstruction *instruction, HBasicBlock *block)
Definition: lithium-arm.cc:473
HGraph * graph() const
Definition: lithium-arm.h:2241
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:145
int block_id() const
Definition: lithium-arm.h:369
void PrintDataTo(StringStream *stream) const
Definition: lithium.cc:137
virtual const char * Mnemonic() const
Definition: lithium-arm.cc:170
CompilationInfo * info() const
Definition: lithium-arm.h:2240
#define UNIMPLEMENTED()
Definition: checks.h:48
static const int kNumAllocatableRegisters
Definition: assembler-arm.h:74
const Register rcx
Token::Value op() const
Definition: lithium-arm.h:1140
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
void AddGapMove(int index, LOperand *from, LOperand *to)
Definition: lithium-arm.cc:531
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:210
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:190
Handle< String > name() const
Definition: lithium-arm.h:1516
LConstantOperand * DefineConstantOperand(HConstant *constant)
Definition: lithium-arm.cc:492
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:195
Representation LookupLiteralRepresentation(LConstantOperand *operand) const
Definition: lithium-arm.cc:542
bool HasPointerMap() const
Definition: lithium-arm.h:245
int NearestGapPos(int index) const
Definition: lithium-arm.cc:525
bool IsRedundant() const
Definition: lithium-arm.cc:134
const XMMRegister xmm2
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:319
virtual int InputCount()=0
static HValue * cast(HValue *value)
Handle< String > type_literal()
Definition: lithium-arm.h:2088
FlagType type() const
Definition: flags.cc:1358
void PrintTo(StringStream *stream)
Definition: lithium.cc:35
Handle< Object > LookupLiteral(LConstantOperand *operand) const
Definition: lithium-arm.cc:537
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:288
const ZoneList< LInstruction * > * instructions() const
Definition: lithium-arm.h:2242
virtual void PrintDataTo(StringStream *stream)
Definition: lithium-arm.cc:348